././@PaxHeader0000000000000000000000000000003200000000000010210 xustar0026 mtime=1751923813.53571 masakari-19.1.0.dev18/0000775000175100017510000000000015033036146013431 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/.coveragerc0000664000175100017510000000013515033036143015546 0ustar00mylesmyles[run] branch = True source = masakari omit = masakari/tests/* [report] ignore_errors = True ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/.mailmap0000664000175100017510000000013115033036143015042 0ustar00mylesmyles# Format is: # # ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/.pre-commit-config.yaml0000664000175100017510000000104415033036143017706 0ustar00mylesmyles--- repos: - repo: https://github.com/pre-commit/pre-commit-hooks rev: v5.0.0 hooks: - id: trailing-whitespace - id: mixed-line-ending args: ['--fix', 'lf'] exclude: '.*\.(svg)$' - id: check-byte-order-marker - id: check-merge-conflict - id: debug-statements - id: check-yaml files: .*\.(yaml|yml)$ - repo: https://opendev.org/openstack/hacking rev: 7.0.0 hooks: - id: hacking additional_dependencies: [] exclude: '^(doc|releasenotes|tools)/.*$' ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/.stestr.conf0000664000175100017510000000006515033036143015700 0ustar00mylesmyles[DEFAULT] test_path=./masakari/tests/unit top_dir=./ ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/.yamllint0000664000175100017510000000010715033036143015256 0ustar00mylesmyles--- extends: default ignore: | .tox/ rules: line-length: disable ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 
masakari-19.1.0.dev18/.zuul.yaml0000664000175100017510000000662415033036143015377 0ustar00mylesmyles--- - job: name: masakari-functional-devstack parent: devstack description: | Masakari functional tests job based on devstack (single-node). pre-run: playbooks/devstack/pre.yaml run: - playbooks/devstack/run-devstack.yaml - playbooks/devstack/run-tox.yaml post-run: playbooks/devstack/post.yaml required-projects: - openstack/masakari - openstack/masakari-monitors irrelevant-files: - ^api-ref/ - ^doc/ - ^masakari/tests/unit/ - ^releasenotes/ - ^\.coveragerc$ - ^\.gitignore$ - ^\.gitreview$ - ^\.mailmap$ - ^\.yamllint$ - ^\.zuul\.yaml$ - ^LICENSE$ - ^.*[^/]+\.rst$ - ^bindep\.txt$ vars: devstack_plugins: masakari: https://opendev.org/openstack/masakari devstack_services: # NOTE(yoctozepto): Disable controller's cinder (c-* services). # It is enabled by default by the parent job (devstack) definition. cinder: false # NOTE(yoctozepto): Disable controller's swift (s-* services). # It is enabled by default by the parent job (devstack) definition. swift: false host-vars: controller: tox_install_siblings: false tox_envlist: functional group-vars: subnode: devstack_services: # Override Masakari services on non-controllers (overrides default # plugin settings in Masakari). Only monitors are left. masakari-api: false masakari-engine: false # NOTE(yoctozepto): Disable compute's cinder (c-* services). # It is enabled by default by the parent job (devstack) definition. cinder: false - job: name: masakari-functional-devstack-multinode parent: masakari-functional-devstack description: | Masakari functional tests job based on devstack (two-node). nodeset: openstack-two-node-noble - job: name: masakari-functional-devstack-multinode-jammy parent: masakari-functional-devstack description: | Masakari functional tests job based on devstack (Jammy two-node). 
nodeset: openstack-two-node-jammy - job: name: masakari-functional-devstack-ipv6 # NOTE(yoctozepto): The devstack job does not support multinode IPv6 yet. parent: masakari-functional-devstack description: | Masakari functional tests job based on devstack (single-node). IPv6 variant. run: - playbooks/devstack/run-devstack.yaml - playbooks/devstack/run-verify-config-is-ipv6-only.yaml - playbooks/devstack/run-tox.yaml vars: devstack_localrc: SERVICE_IP_VERSION: 6 TUNNEL_IP_VERSION: 6 SERVICE_HOST: "" - project: queue: masakari templates: - check-requirements - openstack-cover-jobs - openstack-python3-jobs - publish-openstack-docs-pti - release-notes-jobs-python3 - periodic-stable-jobs - periodic-jobs-with-oslo-master check: jobs: - openstack-tox-linters - masakari-functional-devstack-multinode - masakari-functional-devstack-multinode-jammy - masakari-functional-devstack-ipv6 gate: jobs: - openstack-tox-linters - masakari-functional-devstack-multinode - masakari-functional-devstack-multinode-jammy - masakari-functional-devstack-ipv6 periodic: jobs: - masakari-functional-devstack-multinode ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/AUTHORS0000664000175100017510000001045215033036143014500 0ustar00mylesmylesAbhishek Kekane Andreas Jaeger Arthur Dayne Balazs Gibizer Ben Nemec Chuck Short Corey Bryant Cyril Roelandt Daisuke Suzuki Deepak Dinesh Bhor Dmitriy Rabotyagov Dmitriy Rabotyagov Doug Hellmann Dr. 
Jens Harbott Elod Illes Ghanshyam Ghanshyam Mann Hangdong Zhang Hervé Beraud Ian Wienand Ivan Anfimov James Page James Page Jay Faulkner Jean-Philippe Evrard Jegor van Opdorp Ji-Wei Kengo Takahara Kengo Takahara Louie KWAN Mark Goddard Mitya_Eremeev Neha Alhat Nguyen Hai Nguyen Hung Phuong OpenStack Release Bot Pavlo Shchelokovskyy Radosław Piliszek Rikimaru Honjo Rikimaru Honjo Sampath Priyankara Sean McGinnis Shilpa Shilpa Devharakar Stephen Finucane Takahiro Izumi Takashi Kajinami Takashi Kajinami Takashi Kajinami Takashi NATSUME Takashi Natsume Tobias Urdin Van Hung Pham Vu Cong Tuan Xing Zhang XinxinShen Yaguo Zhou YeHaiyang <6161910042@vip.jiangnan.edu.cn> Yukinori Sagara akhiljain23 avnish bhagyashris binhong.hua caoyuan chenjiao deepakmourya dengzhaosen dineshbhor ericxiett ghanshyam ghanshyam hussainchachuliya jacky06 jayashri bidwe juhoson lcsong liyingjun ljhuang lkwan melissaml minruigao nicolas.parquet niraj singh nirajsingh niuke niwa.keiji.z03 openstack pandatt pengyuesheng pooja jadhav poojajadhav rajat29 ricolin sampathP sapd shangxiaobj shilpa shilpa.devharakar songwenping sue sunjia suzhengwei suzhengwei tpatil tpatil uchenily wangkuntian wu.chunyang wu.shiming wuting zhangbailin zhangboye zhangmeng zhaoleilc <15247232416@163.com> zhoulinhui ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/CONTRIBUTING.rst0000664000175100017510000000113715033036143016071 0ustar00mylesmylesThe source repository for this project can be found at: https://opendev.org/openstack/masakari Pull requests submitted through GitHub are not monitored. 
To start contributing to OpenStack, follow the steps in the contribution guide to set up and use Gerrit: https://docs.openstack.org/contributors/code-and-documentation/quick-start.html Bugs should be filed on Launchpad: https://bugs.launchpad.net/masakari For more specific information about contributing to this repository, see the masakari contributor guide: https://docs.openstack.org/masakari/latest/contributor/contributing.html ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/ChangeLog0000664000175100017510000004735315033036143015214 0ustar00mylesmylesCHANGES ======= * Move test base into tests directory * HA notification * pre-commit: Bump versions * add pyproject.toml to support pip 23.1 * Drop support for Python 3.8, 3.9 * setup: Remove pbr's wsgi\_scripts * Add new masakari.wsgi module * Bump hacking * Remove SQLALCHEMY\_WARN\_20 * Update master for stable/2023.2 * Update master for stable/2025.1 * Replace oslo\_utils.encodeutils.exception\_to\_unicode 19.0.0 ------ * Replace deprecated configure\_auth\_token\_middleware * Make functional jobs voting * Do not use network for server in functional tests * Use separate access creds in functional tests * Fix logging in functional tests * Remove default override for config options policy\_file * Adapt tests to new messages from jsonschema 4.23.0 * [devstack] use role member instead of Member * reno: Update master for unmaintained/2023.1 * Remove workaround for eventlet bug #592 * Fix document rendering and update 2025.1 cycle testing * Update master for stable/2024.2 * Fix a typo and links in the Notifications doc 18.0.0 ------ * Ensure migration script use correct alembic table * Replace deprecated oslo\_db.concurrency.TpoolDbapiWrapper * reno: Update master for unmaintained/zed * pre-commit: Bump versions, switch to native hacking plugin * Remove SQLAlchemy tips jobs * Update master for stable/2024.1 * reno: Update master for unmaintained/xena * 
reno: Update master for unmaintained/wallaby * reno: Update master for unmaintained/victoria * reno: Update master for unmaintained/yoga 17.0.0 ------ * tests: Fix syntax error in hacking test * Update python classifier in setup.cfg * enable check-requirements * remove sqlalchemy-migrate requirement * Add job to test with SQLAlchemy master (2.x) * db: Don't rely on implicit autocommit * db: Don't use legacy select() calling style * db: Don't use strings to indicate paths * db: Don't pass strings to Connection.execute() * tests: Resolve various warnings * tests: Enable SQLAlchemy 2.0 deprecation warnings * db: Remove legacy migrations * db: Migrate to alembic * tests: Validate number of rows \_before\_ purging * db: Sync alembic, sqlalchemy-migrate migrations * db: Add initial alembic migration * db: Wire up for auto-generation * db: Add initial alembic configuration * db: Remove configurable backends * pre-commit: Update dependencies * fix ci broken 16.0.0 ------ * Fix compatability with new oslo.messaging * Move functional tests to Jammy; focal unsupported * install dependencies when installing masakari * Fix bindep.txt for python 3.11 job(Debian Bookworm) * log when set host on maintenance * extend wait period for process notification * add pre-commit * use looping call with timeout * releasenote for Masakari VMove * Update master for stable/2023.1 15.0.0 ------ * vmoves table purge support * add api-ref for masakari vmoves * update api microversion for masakari vmoves * vm moves for host notification * Use new get\_rpc\_client API from oslo.messaging * Fix tox4 issues * Fix notfication stuck into running status when timeout * Handle host notification without host status * [CI] Move queue setting to project level * Switch to 2023.1 Python3 unit tests and generic template name * Update master for stable/zed 14.0.0 ------ * Fix stopping stopped instances * add notification\_uuid into log message * Cleanup py27 support * Replace abc.abstractproperty with property 
and abc.abstractmethod * update zuul jod: add TUNNEL\_IP\_VERSION * DLM to fix host notification race * Replace python-dev with python3-dev in bindep * Update python testing as per zed cycle testing runtime * [CI] Install dependencies for docs target * Add Python3 zed unit tests * Update master for stable/yoga 13.0.0 ------ * Fix compatibility with oslo.context >= 4.0.0 (one more again) * Fix compatibility with oslo.context >= 4.0.0 (again) * Fix compatibility with oslo.context >= 4.0.0 * Updating python testing classifier as per Yoga testing runtime * Fix auth\_url setting for devstack * Add Python3 yoga unit tests * Update master for stable/xena 12.0.0 ------ * Enable tls-proxy support for test job * Make functional tests consume all config * [DevStack] Use xtrace * [CI] Let Masakari jobs be used from other projects * [CI] Gate on voting jobs * [DevStack] Do not use the sample process list * Generate PDF documentation * Set "disabled reason" for compute service * tests: Remove use of 'oslo\_db.sqlalchemy.test\_base' * Fix Masakari API to properly return error codes * Fix an error in the document * Fix a spelling mistakes in unit test * [train][goal][CI] Define IPv6-only testing job * Fix notification status in notification.inc * [CI] Clear an overridden param * [CI] Fix the functional job * [CI] Update ignored-files * Add the functional job to periodic * [community goal] Update contributor documentation * uuid in log information * Replace getargspec with getfullargspec * Make unit tests compatible with SQLAlchemy 1.4 * Ussuri+ is python3 only and update python to python3 * Drop support for SQLite < 3.7 * Fix a wrong comment on masakari api * setup.cfg: Replace dashes with underscores * [TrivialFix] Improve one method's name * [CI] Drop external CI * Finish switch from http.client to HTTPStatus * Use py3 as the default runtime for tox * Add Python3 xena unit tests * Update master for stable/wallaby 11.0.0 ------ * Fix segment enabled json examples * Fix some 
errors in the document * [CI] Run bashate * Use oslo.context from\_dict unmodified * Assign all aggregates to reserved\_host * Fix response of microversions API * [devstack] Stop polluting masakarimonitors src dir * Fix functional.test\_hosts.test\_list * Add support for system-scoped tokens * Enable database migration unit tests * Fix segment update validation schema for "enabled" * Remove duplicated DEFAULT\_API\_VERSION * Make "create segment" http api unit tests more thorough * Clean up segments http api unit tests * Fix versioned schema validation * add enabled to segment * update docs for enabled to segment * Simplify endpoints created by devstack plugin * [docs] Add "release notes" contributors' section * [docs] Add docs on code structure * Allow segment description to be multiline * Add missed renos * Revert "remove py37" * update docs * remove unicode from code * remove py37 * add host failure reovery method in Masakari doc * Fix endless periodic looping call after failed evacuation * Modified recovery flow for stopped instance * [goal] Deprecate the JSON formatted policy file * [CI] Add periodic jobs * Drop lower-constraints * Update TOX\_CONSTRAINTS\_FILE * Replace deprecated UPPER\_CONSTRAINTS\_FILE variable * [docs] Quick fixes around contribution process * Import HTTPStatus instead of http\_client * Add Python3 wallaby unit tests * Update master for stable/victoria 10.0.0 ------ * Search in nova services instead of hypervisors * Drop Babel from reqs * [CI] Ignore files not relevant for the functional tests * [tests] Remove dead code (matchers) * [Part7]Remove all usage of six library * [Part6]Remove six * [Part5]Remove six * [Part4]Remove six * [goal] Migrate testing to ubuntu focal * Constrain docs deps by upper-constraints * Use some better linting * Fix files to pass linting * Fix tox for api-ref * Fix tox for py38 and lower-contraints * [CI] Fix gate - multiple fixes (Focal/Py38 and policy) * remove future from lower-constraints * [Part3]Remove 
six * [Part2]Remove six * [Part1]Remove six * [docs] Add missing control\_attributes * Remove the unused coding style modules * Add ha\_enabled\_instance\_metadata\_key config option * Fix exception notification with no trace * Fix CA file for nova client * Remove monotonic usage * Switch from unittest2 compat methods to Python 3.x methods * Use uwsgi binary from path * Fix typo in api docs for sort\_key request parameter * not wait when instance evacuate error * drop mock from lower-constraints * Stop to use the \_\_future\_\_ module * Switch to newer openstackdocstheme and reno versions * Cap jsonschema 3.2.0 as the minimal version * Drop python 3.4 constraint on sphinx * Check if host belongs to a segment * Fix host rest url in api-ref doc * Remove translation sections from setup.cfg * Fix hacking min version to 3.0.1 * Monkey patch original current\_thread \_active * Bump default tox env from py37 to py38 * Add py38 package metadata * Add Python3 victoria unit tests * Update master for stable/ussuri * handle duplicate notifications without checking status * check expired notifications * Update hacking for Python3 9.0.0 ----- * Use unittest.mock instead of third party mock * Ignoring host recovery if host\_status is \`UNKNOWN\` * Fix the functional test devstack job * Drop use of SQLAlchemy-Utils * HostNotFound name->id * HypervisorNotFoundByName host\_name->hypervisor\_name * Add ignore\_basepython\_conflict in tox.ini * fix a typo notiification => notification * update segment api doc * Correct indentation for spaces for doc files * Fix constraints URL enforcement for lower-constraints * Sync Sphinx requirement * [ussuri][goal] Drop python 2.7 support and testing * Reorganize masakari documentation * Support noauth2 auth\_strategy * Update master for stable/train * Cleanup instances test data * Notification functional tests fails randomly 8.0.0 ----- * Remove references of novaclient.exceptions.EndpointNotFound * Add Python 3 Train unit tests * add libpq-dev 
and python-dev to build psycopg2 * Replace git.openstack.org URLs with opendev.org URLs * Update api-ref location * Resolve functional gate job failure * Resolve gate job failure * OpenDev Migration Patch * Switch from oslosphinx to openstackdocstheme * Fix typo for sgement => segment * Replace openstack.org git:// URLs with https:// * Update master for stable/stein * Add devstack support for masakari-monitors 7.0.0 ----- * Updated rh host workflow for recovery workflow details * Handle KeyError: 'progress\_details' * Migrate multinode jobs to Ubuntu Bionic * fix oslo-config-generator conf * Functional tests to check recovery workflow details * Functional tests for notification APIs * Configure taskflow connection * Add progress details for recovery workflows * Improve logging error messages * Functional tests for host APIs * Add functional CI job and tests for segments * Send notifications for all API changes * Add common methods to use for sending notification * Define objects used for notification * Add necessary constants and methods for RPC notification * Run all jobs by default using python3 * Py3: Ensure wsgi headers are not bytes type * Recover resized instance(power\_state=SHUTDOWN) with vm\_state=stopped * Fix parsing api version with correct service\_type * Update hacking version >1.1.0 * Allow updating host name only if it exists in nova * Add masakari-systemfault-integration-ci to .zuul.yaml * Use template for lower-constraints * Return 400 if notification payload is incorrect * change nova.services.disable use service\_uuid * Cleanup testrepository * Update coverage environment and use template for cover * Uninstall policy.json from openstack\_dashboard correctly 7.0.0.0b1 --------- * Allow adding host only if it exists in nova * Update the HTTP links to HTTPS * Change openstack-dev to openstack-discuss * Update README * Fix datetime microsecond discrepancy issue * Add framework for masakari-status upgrade check * Fix: TestMasakariAPI tests skipped 
due to missing \_\_init\_\_.py * Increment versioning with pbr instruction * fix tox python3 overrides * Fix docs create service for masakari Currently openstacksdk use instance-ha type service instead of masakari * switch documentation job to new PTI * import zuul job settings from project-config * Add masakari-custom-recovery-methods.conf in etc/masakari * Update reno for stable/rocky 6.0.0.0rc1 ---------- * Add hooks for recovery method customization plugins * Added devstack support for masakari-dashboard * Fix masakari installation failure for policy.json * Register and Document policy in code * Removed unnecessary parantheses in yield statements * Add line for PyCharm IDE in gitignore file 6.0.0.0b3 --------- * fix tox python3 overrides * Remove testrepository * Switch to using stestr * Avoid recovery from failure twice * Fix Context test cases 6.0.0.0b2 --------- * Fix module has no attribute 'parse\_args' * Remove a sample file of masakari.conf * Deploy masakari-api with uwsgi * Generalize DB conf group copying * Make accept-language tests work with webob 1.8.x * Enable mutable config in Masakari * Update "auth\_url" * Fix typo in api-ref/source/failover-segments.inc file 6.0.0.0b1 --------- * add lower-constraints job * Add db purge support * Completely remove mox from Masakari unittest * Fix stable branch releasenotes * Introspective Instance Monitoring through QEMU Guest Agent * Make readme more readable * Ship files like other openstack projects * Fix the docs page link * Use method validate\_integer from oslo.utils * Updated from global requirements * Updated from global requirements * Drop all qpid related explanations * Fix for difference in tzinfo.tzname value between py27 and py35 * Changed default value of 'wait\_period\_after\_power\_off' 5.0.0 ----- * Fix default value of 'nova\_catalog\_admin\_info' * Updated from global requirements 5.0.0.0b3 --------- * Remove use of unsupported TEMPEST\_SERVICES variable * Change masakari service-type from 
'ha' to 'instance-ha' * Ignore '409 Conflict' when adding reserved\_host to aggregate * Stop using deprecated 'message' attribute in Exception * Improve the home-page url in setup.cfg * Evacuates instances which task\_state is not none * Remove unnecessary sleep * Updated from global requirements * Masakari operator's documentation * Masakari developer's documentation * Masakari API Specifications * Remove setting of version/release from releasenotes * Updated from global requirements * Updated from global requirements * Make eventlet hub use a monotonic clock * Fix devstack installation of masakari * Modify unit testcases according to oslo.context * Use service\_type and service\_name of nova\_catalog\_admin\_info * Add ssl support for masakari plugin * Hacking: do not translate log messages * Upgrade oslo.db and Replace DbMigrationError with DBMigrationError * iso8601.is8601.Utc No Longer Exists 4.0.0 ----- * Make 'error' instances recovery configurable * Add missing domain name to novaclient * Make provision to evacuate all instances * Enable global hacking checks and remove local checks * Fix 'host delete' delete's host of another segment * conf: Deprecate 'masakari\_topic' RPC options * Send global\_request\_id to nova when calls are made * Update the documentation link for doc migration * Remove 'on\_shared\_storage' parameter from nova evacuate * Remove unused methods * Remove support for py34 * Use os-testr instead of testr * Enable code coverage report in console output * Use get\_rpc\_transport instead of get\_transport * Fix race condition between evacuation and its confirmation * Updated from global requirements * Exclude on maintenance reserved\_hosts for host\_failure recovery * Optimize the link address * Fix identity\_admin * Revert "Fix os\_privileged\_user\_auth\_url" * Fix failing test cases * Fix os\_privileged\_user\_auth\_url * Ignore instance recovery for 'paused' or 'rescued' instance * Use DDT to reduce test code duplication * Updated from 
global requirements * Remove log translations * Implement auto\_priority and rh\_priority recovery\_methods * Updated from global requirements * Delete the failure host from reserved\_host * Don't pass argument sqlite\_db in set\_defaults() * Use HostAddressOpt for opts that accept IP and hostnames * Add license information in test-requirements.txt * Fix test method which has incorrect reference * Change nova\_catalog\_admin\_info to default to publicURL * Update to a newer hacking library * Prevent 404 error when adding reserved\_host to aggregate 3.0.0.0rc1 ---------- * Add reserved\_host to failed\_host's aggregate * Add host\_failure workflow for 'reserved\_host' recovery\_method * Tolerate jsonschema==2.6.0 * Fix release notes formatting * Release note for \_process\_unfinished\_notifications periodic task * Boolean filters are not working for host list api * InstanceFailure: Ignore notifications for certain events * Set access\_policy for messaging's dispatcher * Add ha enabled config options * Implement \_process\_unfinished\_notifications periodic tasks * Sync tox environment with upper-constraint of requirements * Add business rules for host and failover\_segment * Bump taskflow version to minimum 2.7.0 * Added engine manager and rpcapi related unit tests * Stop logging unnecessary warning on context create * Fix incorrect test class name * Update to match latest global-requirements * Use dict.items() and dict.values() as per guidelines * Add unit tests for notification business rules * Remove deps under pep8 section to enable hacking checks * Fix bad request error for valid marker in hosts * Extracted HTTP response codes to constants * Add unit tests for process failure flow * Remove unused test\_masakari module * [Devstack] Fix devstack plugin compatibility * Fix failing gate jobs due to removal of nova\_client.service\_catalog * Avoid printing traceback if VM is not HA\_Enabled * Stop instance only if it's HA\_Enabled * Fix spelling mistake and log 
message * Add database related test cases * Fix spelling mistake * Change default topic name of masakari-engine * Return correct response codes * Add unit tests for instance failure flow * Add unit tests for host failure workflow * Add missing namespaces in masakari-config-generator.conf * Add admin role for masakari user * Fix invalid port reference in devstack settings * Add unit tests for nova support * Set admin user as the default policy * Fix 500 error while create/update host 2.0.0 ----- * Add instance and process failure workflow * Add host failure workflow * Add nova support in masakari * Add TaskFlowDriver support to process notifications * Add business rule for process\_notification * Convert boolean parameters in host api * Remove pypy from tox env list * Return HTTPBadRequest instead of HTTPNotFound * Drop MANIFEST.in - it's not needed by pbr * Add process\_notification cast call * Add authentication libraries to requirements.txt * Add RPC layer for masakari * Enable release notes translation * Fix 500 if you pass invalid sort\_key and sort\_dir * Remove unused method * Enable masakari-engine in devstack * Add masakari-engine support * Add Masakari Object Serializer * Add notifications controller for masakari * Remove wrong reference to cinder * Remove reference of Nova * Added host controller for masakari * Added failover segments controller for masakari * Add notification related objects * Remove redundant str typecasting * Using assertIsNone() instead of assertIs(None, ..) 
* py33 is no longer supported by Infra's CI * Remove unexpected method argument * Clean imports in code * TrivialFix: Merge imports in code * TrivialFix: Remove logging import unused * Alter nullable constraint on created\_at column * Removed unnecessary sort\_keys and sort\_dirs * Add notification db apis and migration script * Don't attempt to escalate masakari-manage privileges * Dictionary creation could be rewritten with dictionary literal * Added masakari objects * Refactor: Move post method to APIValidationTestCase base class * Add test cases for 'extension\_info' module * Correct API Version String format * Return NotFound exception for delete db api * Cleanup tox.ini: Remove obsolete constraints * Add testing for WSGI Framework * Add schema framework for masakari * Added masakari db apis * Update unique constraint of hosts table * Add devstack plugin * Use %()d for integer substitution * Make use of oslo-config-generator * Dictionary creation could be rewritten with dictionary literal * Add db sync support for masakari * Add a hacking rule to enforce use of LOG.warning * Add a hacking rule for string interpolation at logging * Add hacking checks * Add support for wsgi framework * Add project description to README.rst * Initial Cookiecutter Commit * Added .gitreview ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/HACKING.rst0000664000175100017510000000604015033036143015224 0ustar00mylesmylesmasakari Style Commandments =========================== - Step 1: Read the OpenStack Style Commandments https://docs.openstack.org/hacking/latest/ - Step 2: Read on Masakari Specific Commandments ------------------------------ - [M301] no db session in public API methods (disabled) This enforces a guideline defined in ``oslo.db.sqlalchemy.session`` - [M302] timeutils.utcnow() wrapper must be used instead of direct calls to datetime.datetime.utcnow() to make it easy to override its return value in tests - 
[M303] capitalize help string Config parameter help strings should have a capitalized first letter - [M305] Change assertTrue(isinstance(A, B)) by optimal assert like assertIsInstance(A, B). - [M306] Change assertEqual(type(A), B) by optimal assert like assertIsInstance(A, B) - [M308] Validate that log messages are not translated. - [M309] Don't import translation in tests - [M310] Setting CONF.* attributes directly in tests is forbidden. Use self.flags(option=value) instead. - [M315] Method's default argument shouldn't be mutable - [M316] Ensure that the _() function is explicitly imported to ensure proper translations. - [M317] Ensure that jsonutils.%(fun)s must be used instead of json.%(fun)s - [M318] Change assertTrue/False(A in/not in B, message) to the more specific assertIn/NotIn(A, B, message) - [M319] Check for usage of deprecated assertRaisesRegexp - [M320] Must use a dict comprehension instead of a dict constructor with a sequence of key-value pairs. - [M321] Change assertEqual(A in B, True), assertEqual(True, A in B), assertEqual(A in B, False) or assertEqual(False, A in B) to the more specific assertIn/NotIn(A, B) - [M322] Check masakari.utils.spawn() is used instead of greenthread.spawn() and eventlet.spawn() - [M323] contextlib.nested is deprecated - [M324] Config options should be in the central location ``masakari/conf/`` - [M325] Check for common double word typos - [M326] Python 3: do not use dict.iteritems. - [M327] Python 3: do not use dict.iterkeys. - [M328] Python 3: do not use dict.itervalues. - [M329] Deprecated library function os.popen() - [M331] LOG.warn is deprecated. Enforce use of LOG.warning. - [M332] Yield must always be followed by a space when yielding a value. - [M333] Policy registration should be in the central location ``masakari/policies/`` - [M334] Do not use the oslo_policy.policy.Enforcer.enforce() method. 
Use of pre-commit checks ------------------------ `pre-commit`_ is a software tool that allows us to manage pre-commit checks as part of the Git repository's configuration and to run checks as Git pre-commit hooks (or other types of Git hooks) automatically on developer machines. It helps to catch and fix common issues before they get pushed to the server. After the installation of the tool (e.g. on Fedora via `sudo dnf install pre-commit`) simply `cd` to the Git repository and run `pre-commit install` to let the tool install its Git pre-commit hook. From now on these predefined checks will run on files that you change in new Git commits. .. _pre-commit: https://pre-commit.com/ ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/LICENSE0000664000175100017510000002363715033036143014446 0ustar00mylesmyles Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. 
"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." 
"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. 
Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. 
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/MANIFEST.in0000664000175100017510000000000015033036143015152 0ustar00mylesmyles././@PaxHeader0000000000000000000000000000003200000000000010210 xustar0026 mtime=1751923813.53571 masakari-19.1.0.dev18/PKG-INFO0000644000175100017510000001135615033036146014532 0ustar00mylesmylesMetadata-Version: 2.2 Name: masakari Version: 19.1.0.dev18 Summary: Virtual Machine High Availability (VMHA) service for OpenStack Home-page: https://docs.openstack.org/masakari/latest/ Author: OpenStack Author-email: openstack-discuss@lists.openstack.org Classifier: Development Status :: 5 - Production/Stable Classifier: Environment :: OpenStack Classifier: Intended Audience :: Information Technology Classifier: Intended Audience :: System Administrators Classifier: License :: OSI Approved :: Apache Software License Classifier: Operating System :: POSIX :: Linux Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.10 Classifier: Programming Language :: Python :: 3.11 Classifier: Programming Language :: Python :: 3.12 Classifier: Programming Language :: Python :: 3 :: Only Classifier: Programming Language :: Python :: Implementation :: CPython Requires-Python: >=3.10 License-File: LICENSE Requires-Dist: alembic>=1.8.0 Requires-Dist: iso8601>=0.1.11 Requires-Dist: jsonschema>=3.2.0 Requires-Dist: keystoneauth1>=3.4.0 Requires-Dist: keystonemiddleware>=4.17.0 Requires-Dist: WebOb>=1.7.1 Requires-Dist: microversion-parse>=0.2.1 Requires-Dist: oslo.config>=5.2.0 Requires-Dist: oslo.context>=2.19.2 Requires-Dist: oslo.db>=4.44.0 Requires-Dist: oslo.messaging>=14.1.0 Requires-Dist: oslo.i18n>=3.15.3 Requires-Dist: oslo.log>=3.36.0 Requires-Dist: oslo.middleware>=3.31.0 Requires-Dist: oslo.policy>=4.5.0 Requires-Dist: oslo.service!=1.28.1,>=1.24.0 Requires-Dist: oslo.upgradecheck>=1.3.0 Requires-Dist: 
oslo.utils>=4.7.0 Requires-Dist: oslo.versionedobjects>=1.31.2 Requires-Dist: pbr!=2.1.0,>=2.0.0 Requires-Dist: python-novaclient>=9.1.0 Requires-Dist: stevedore>=1.20.0 Requires-Dist: SQLAlchemy>=1.2.19 Requires-Dist: SQLAlchemy-Utils>=0.33.10 Requires-Dist: taskflow>=2.16.0 Requires-Dist: tooz>=2.10.1 Dynamic: author Dynamic: author-email Dynamic: classifier Dynamic: description Dynamic: requires-dist Dynamic: requires-python Dynamic: summary ======== Masakari ======== Virtual Machine High Availability (VMHA) service for OpenStack Masakari provides Virtual Machine High Availability (VMHA) service for OpenStack clouds by automatically recovering the KVM-based Virtual Machine(VM)s from failure events such as VM process down, provisioning process down, and nova-compute host failure. It also provides API service for manage and control the automated rescue mechanism. NOTE: Use masakari only if instance path is configured on shared storage system i.e, 'instances_path' config option of nova has a path of shared directory otherwise instance data will be lost after the evacuation of instance from failed host if, * instance is booted from image * flavor using ephemeral disks is used Original version of Masakari: https://github.com/ntt-sic/masakari Tokyo Summit Session: https://www.youtube.com/watch?v=BmjNKceW_9A Masakari is distributed under the terms of the Apache License, Version 2.0. The full terms and conditions of this license are detailed in the LICENSE file. * Free software: Apache license 2.0 * Documentation: https://docs.openstack.org/masakari/latest * Release notes: https://docs.openstack.org/releasenotes/masakari/ * Source: https://opendev.org/openstack/masakari * Bugs: https://bugs.launchpad.net/masakari Configure masakari-api ---------------------- #. Create masakari user: .. code-block:: shell-session openstack user create --password-prompt masakari (give password as masakari) #. Add admin role to masakari user: .. 
code-block:: shell-session openstack role add --project service --user masakari admin #. Create new service: .. code-block:: shell-session openstack service create --name masakari --description "masakari high availability" instance-ha #. Create endpoint for masakari service: .. code-block:: shell-session openstack endpoint create --region RegionOne masakari --publicurl http://:/v1/%\(tenant_id\)s #. Clone masakari using .. code-block:: shell-session git clone https://github.com/openstack/masakari.git #. Run setup.py from masakari .. code-block:: shell-session sudo python setup.py install #. Create directory ``/etc/masakari`` #. Copy ``masakari.conf``, ``api-paste.ini`` and ``policy.yaml`` file from ``masakari/etc/`` to ``/etc/masakari`` folder #. To run masakari-api simply use following binary: .. code-block:: shell-session masakari-api Configure masakari database --------------------------- #. Create 'masakari' database #. After running setup.py for masakari (``sudo python setup.py install``), run ``masakari-manage`` command to sync the database .. code-block:: shell-session masakari-manage db sync Features -------- * TODO ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/README.rst0000664000175100017510000000534515033036143015124 0ustar00mylesmyles======== Masakari ======== Virtual Machine High Availability (VMHA) service for OpenStack Masakari provides Virtual Machine High Availability (VMHA) service for OpenStack clouds by automatically recovering the KVM-based Virtual Machine(VM)s from failure events such as VM process down, provisioning process down, and nova-compute host failure. It also provides API service for manage and control the automated rescue mechanism. 
NOTE: Use masakari only if instance path is configured on shared storage system i.e, 'instances_path' config option of nova has a path of shared directory otherwise instance data will be lost after the evacuation of instance from failed host if, * instance is booted from image * flavor using ephemeral disks is used Original version of Masakari: https://github.com/ntt-sic/masakari Tokyo Summit Session: https://www.youtube.com/watch?v=BmjNKceW_9A Masakari is distributed under the terms of the Apache License, Version 2.0. The full terms and conditions of this license are detailed in the LICENSE file. * Free software: Apache license 2.0 * Documentation: https://docs.openstack.org/masakari/latest * Release notes: https://docs.openstack.org/releasenotes/masakari/ * Source: https://opendev.org/openstack/masakari * Bugs: https://bugs.launchpad.net/masakari Configure masakari-api ---------------------- #. Create masakari user: .. code-block:: shell-session openstack user create --password-prompt masakari (give password as masakari) #. Add admin role to masakari user: .. code-block:: shell-session openstack role add --project service --user masakari admin #. Create new service: .. code-block:: shell-session openstack service create --name masakari --description "masakari high availability" instance-ha #. Create endpoint for masakari service: .. code-block:: shell-session openstack endpoint create --region RegionOne masakari --publicurl http://:/v1/%\(tenant_id\)s #. Clone masakari using .. code-block:: shell-session git clone https://github.com/openstack/masakari.git #. Run setup.py from masakari .. code-block:: shell-session sudo python setup.py install #. Create directory ``/etc/masakari`` #. Copy ``masakari.conf``, ``api-paste.ini`` and ``policy.yaml`` file from ``masakari/etc/`` to ``/etc/masakari`` folder #. To run masakari-api simply use following binary: .. code-block:: shell-session masakari-api Configure masakari database --------------------------- #. 
Create 'masakari' database #. After running setup.py for masakari (``sudo python setup.py install``), run ``masakari-manage`` command to sync the database .. code-block:: shell-session masakari-manage db sync Features -------- * TODO ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923813.0 masakari-19.1.0.dev18/RELEASENOTES.rst0000664000175100017510000011766715033036145016075 0ustar00mylesmyles======== masakari ======== .. _masakari_19.0.0-12: 19.0.0-12 ========= .. _masakari_19.0.0-12_New Features: New Features ------------ .. releasenotes/notes/add-masakari-wsgi-module-a5f5a649a2ec460c.yaml @ b'300fa732dc27fdb3af0d87b4049ef59c5aa8b928' - A new module, ``masakari.wsgi``, has been added as a place to gather WSGI ``application`` objects. This is intended to ease deployment by providing a consistent location for these objects. For example, if using uWSGI then instead of: .. code-block:: ini [uwsgi] wsgi-file = /bin/masakari-wsgi You can now use: .. code-block:: ini [uwsgi] module = masakari.wsgi.api:application This also simplifies deployment with other WSGI servers that expect module paths such as gunicorn. .. _masakari_19.0.0-12_Upgrade Notes: Upgrade Notes ------------- .. releasenotes/notes/drop-python-38-39-deab0b81006bae48.yaml @ b'c39b9bd7fdc66e684567224f9cd64634c82d9670' - Support for Python 3.8 and Python 3.9 has been removed. The minimum supported version of Python is now Python 3.10. .. releasenotes/notes/remove-masakari-wsgi-script-bb737746a17111ab.yaml @ b'ccaecd325f86b46f86274fdeafa661aee436a434' - The WSGI script ``masakari-wsgi`` has been removed. Deployment tooling should instead reference the Python module path for the wsgi module in Masakari, ``masakari.wsgi.api:application`` if their chosen WSGI server supports this (gunicorn, uWSGI, etc.) or implement a ``.wsgi`` script themselves if not (mod_wsgi). .. _masakari_2.0.0: 2.0.0 ===== .. _masakari_2.0.0_New Features: New Features ------------ .. 
releasenotes/notes/failover_segment_apis-f5bea1cd6d103048.yaml @ b'd906541e21248148c2de1b7900006b5164543288' - Added following new REST API's for masakari operators - 1. GET /v1/segments Returns list of all failover segments. 2. GET /v1/segments/ Returns specific failover segment with uuid. 3. POST /v1/segments Creates a new failover segment 4. PUT /v1/segments/ Updates a failover segment by uuid 5. DELETE /v1/segments/ Delete a failover segment by uuid .. releasenotes/notes/host-apis-46a87fcd56d8ed30.yaml @ b'2e267e66044ccc4f4fd37ca5c0f3faa871ffc3fb' - Added following new REST API's for masakari operators - 1. GET /v1/segments//hosts Returns list of all hosts associated with failover segment. 2. GET /v1/segments//hosts/ Returns specific host from the failover segment with uuid. 3. POST /v1/segments//hosts Creates a new host in failover segment 4. PUT /v1/segments//hosts/ Updates a host in failover segment by uuid 5. DELETE /v1/segments//hosts/ Delete a host from failover segment by uuid .. releasenotes/notes/notifications_apis-3c3d5055ae9c6649.yaml @ b'44f7699e48510916b802b814b1b0fd7d64c81f3d' - Added following new REST API's related to notifications - 1. GET /v1/notifications Returns list of all notifications. 2. GET /v1/notifications/ Returns specific notification with uuid. 3. POST /v1/notifications Creates a new notification. .. _masakari_2.0.0_Other Notes: Other Notes ----------- .. releasenotes/notes/adopt-oslo-config-generator-cf2fdb17cf7f13db.yaml @ b'27977c10eed24fb327a1e2c86fd12e72613083c0' - Adopt oslo-config-generator to generate sample config files. New config options from masakari code should register with masakari/conf/opts.py. A deprecated option should add a deprecated group even if it didn't alter its group, otherwise the deprecated group will use 'DEFAULT' by default. .. _masakari_3.0.0.0rc1: 3.0.0.0rc1 ========== .. _masakari_3.0.0.0rc1_New Features: New Features ------------ .. 
releasenotes/notes/add-periodic-tasks-0c96d6f620502a75.yaml @ b'00bc5612a834823d21192a0fc328b53ccf44b0be' - Added _process_unfinished_notifications to process notifications which are in error or new state. This periodic task will execute at regular interval defined by new config option 'process_unfinished_notifications_interval' defaults to 120 seconds. The notifications which are in ‘new’ status will be picked up based on a new config option ‘retry_notification_new_status_interval’ defaults to 60 seconds. To change the default execution time of periodic task, following config option needs to be set with desirable time under 'DEFAULT' section in 'masakari.conf' file:: [DEFAULT] process_unfinished_notifications_interval = 120 To change the default identification time of notifications which are stuck in 'NEW' state, following config option needs to be set with desirable time under 'DEFAULT' section in 'masakari.conf' file:: [DEFAULT] retry_notification_new_status_interval = 60 .. releasenotes/notes/add_ha_enabled_config_options-54a9270a5993d20a.yaml @ b'77f51c51a937d0ed9f1f31522593bae26f15e04a' - Operators can decide whether all instances or only those instances which contain metadata key 'HA_Enabled=True' should be allowed for evacuation from a failed source compute node. When set to True, it will evacuate all instances from a failed source compute node. First preference will be given to those instances which contain 'HA_Enabled=True' metadata key, and then it will evacuate the remaining ones. When set to False, it will evacuate only those instances which contain 'HA_Enabled=True' metadata key. To use this feature, following config option need to be set under ``host_failure`` section in 'masakari.conf' file:: [host_failure] evacuate_all_instances = True .. 
releasenotes/notes/add_ha_enabled_config_options-54a9270a5993d20a.yaml @ b'77f51c51a937d0ed9f1f31522593bae26f15e04a' - Operators can decide whether all instances or only those instances which contain metadata key 'HA_Enabled=True' should be taken into account to recover from instance failure events. When set to True, it will execute instance failure recovery actions for an instance irrespective of whether that particular instance contains metadata key 'HA_Enabled=True' or not. When set to False, it will only execute instance failure recovery action for an instance which contain metadata key 'HA_Enabled=True'. To use this feature, following config option need to be set under ``instance_failure`` section in 'masakari.conf' file:: [instance_failure] process_all_instances = True .. releasenotes/notes/add_reserved_host_to_aggregates-5f506d08354ec148.yaml @ b'd45f754cbb6ca73ebeccf896b8a48bd4682e1cdc' - Operators can now decide based on the new config option 'add_reserved_host_to_aggregate' whether to add or not a reserved_host to all host aggregates which failed compute host belongs to. To use this feature, following config option need to be set under ``host_failure`` section in 'masakari.conf' file:: [host_failure] add_reserved_host_to_aggregate = True .. releasenotes/notes/reserved_host_recovery_method-d2de1f205136c8d5.yaml @ b'3f254e32a945b9504717a1f91c5eaa24b154df52' - Implemented workflow for 'reserved_host' recovery method in case of host failure. Now operator can create or update failover segment with 'reserved_host' recovery method along with the existing 'auto' method. When 'reserved_host' recovery_method is set to a failover segment, operators should also add one or more hosts with reserved flag set as True. .. _masakari_3.0.0.0rc1_Bug Fixes: Bug Fixes --------- .. 
releasenotes/notes/correct_response_code-df8b43a201efa1b4.yaml @ b'77f51c51a937d0ed9f1f31522593bae26f15e04a' - Fixes `bug 1645699`_ which will return correct response codes for below apis: - POST /v1/notification - old_response: 200, new_response: 202 - DELETE /v1/notification - old_response: 404, new_response: 405 - PUT /v1/notification/ - old_response: 404, new_response: 405 - POST /v1/host - old_response: 200, new_response: 201 - DELETE /v1/host/ - old_response: 200, new_response: 204 - POST /v1/segment - old_response: 200, new_response: 201 - DELETE /v1/segment/ - old_response: 200, new_response: 204 .. _bug 1645699: https://bugs.launchpad.net/masakari/+bug/1645699 .. _masakari_4.0.0: 4.0.0 ===== .. _masakari_4.0.0_Prelude: Prelude ------- .. releasenotes/notes/bug-add-missing-domain-name-5181c02f3f033a22.yaml @ b'6139da28da2adc2586b34ff9a6a9cb561b7f40fb' Domain name is needed when using keystone v3 to create keystone session, if not provided, InvalidInput exception will be raised. Two new options "os_user_domain_name" and "os_project_domain_name" with default value "default" are added to fix the issue. .. _masakari_4.0.0_New Features: New Features ------------ .. releasenotes/notes/add_evacuate_error_instances_conf_option-5b4d1906137395f0.yaml @ b'a67e0c50385a189006568817ff112c286a0048aa' - Operators can decide whether error instances should be allowed for evacuation along with other instances from a failed source compute node or not. Added a new config option ``ignore_instances_in_error_state`` to achieve this. When set to True, masakari will skip the recovery of error instances otherwise it will evacuate error instances as well from a failed source compute node. To use this feature, following config option need to be set under ``host_failure`` section in 'masakari.conf' file:: [host_failure] ignore_instances_in_error_state = False The default value for this config option is set to False. .. 
releasenotes/notes/auto_priority_and_rh_priority_recovery_methods-b88cc00041fa2c4d.yaml @ b'4861413682d62207e34393c6f5629fe66c626179' - Implemented workflow for 'auto_priority' and 'rh_priority' recovery methods in case of host failure recovery. Operators can set failover_segment's recovery_method as 'auto_priority' and 'rh_priority' now. In case of 'auto_priority' the 'auto' workflow will be executed first to recover the instances from failed compute host. If 'auto' workflow fails to recover the instances then 'reserved_host' workflow will be tried. In case of 'rh_priority' the 'reserved_host' workflow will be executed first to recover the instances from failed compute host. If 'reserved_host' workflow fails to recover the instances then 'auto' workflow will be tried. .. _masakari_4.0.0_Deprecation Notes: Deprecation Notes ----------------- .. releasenotes/notes/deprecate-topic-opt-af83f82143143c61.yaml @ b'62128ecf4b111f5db9d0e81e958e924220856a14' - The ``masakari_topic`` config option is now deprecated and will be removed in the Queens release. .. _masakari_4.0.0_Bug Fixes: Bug Fixes --------- .. releasenotes/notes/evacuation_in_threads-cc9c79b10acfb5f6.yaml @ b'25d33d2cb1eec271227309a34a45b0d3f0987d50' - Fixes `bug 1693728`_ which will fix the race condition where after evacuation of an instance to other host user might perform some actions on that instance which gives wrong instance vm_state to ConfirmEvacuationTask that results into notification failure. To fix this issue, following config option is added under ``DEFAULT`` section in 'masakari.conf' file:: [DEFAULT] host_failure_recovery_threads = 3 This config option decides the number of threads going to be used for evacuating the instances. .. _`bug 1693728`: https://bugs.launchpad.net/masakari/+bug/1693728 .. _masakari_6.0.0: 6.0.0 ===== .. _masakari_6.0.0_New Features: New Features ------------ .. 
releasenotes/notes/bp-mutable-config-57efdd467c01aa7b.yaml @ b'4299b38883c7c711ff3e349f5b134b6c9a272caf' - Masakari has been enabled for mutable config. Below option may be reloaded by sending SIGHUP to the correct process. 'retry_notification_new_status_interval' option will apply to process unfinished notifications. .. releasenotes/notes/db-purge-support-7a33e2ea5d2a624b.yaml @ b'4048b1fd8eae065652105d19892071b0a4fa5533' - Operators can now purge the soft-deleted records from the database tables. Added below command to purge the records: ``masakari-manage db purge --age_in_days --max_rows `` NOTE: ``notifications`` db records will be purged on the basis of ``update_at`` and ``status`` fields (finished, ignored, failed) as these records will not be automatically soft-deleted by the system. .. releasenotes/notes/policy-in-code-8740d51624055044.yaml @ b'd7592cbe254e9337ed08d5454d0fbd88404ddc0d' - Masakari now support policy in code, which means if operators doesn't need to modify any of the default policy rules, they do not need a policy file. Operators can modify/generate a ``policy.yaml.sample`` file which will override specific policy rules from their defaults. Masakari is now configured to work with two oslo.policy CLI scripts that have been added: - The first of these can be called like ``oslopolicy-list-redundant --namespace masakari`` and will output a list of policy rules in policy.[json|yaml] that match the project defaults. These rules can be removed from the policy file as they have no effect there. - The second script can be called like ``oslopolicy-policy-generator --namespace masakari --output-file policy-merged.yaml`` and will populate the policy-merged.yaml file with the effective policy. This is the merged results of project defaults and config file overrides. NOTE: Default `policy.json` file is now removed as Masakari now uses default policies. A policy file is only needed if overriding one of the defaults. .. 
releasenotes/notes/recovery-method-customization-3438b0e26e322b88.yaml @ b'ad3dc737c984c267980e7479acc2bf8856b556d5' - Operator can now customize workflows to process each type of failure notifications (hosts, instance and process) as per their requirements. Added below new config section for customized recovery flow in a new conf file masakari-custom-recovery-methods.conf - [taskflow_driver_recovery_flows] Under [taskflow_driver_recovery_flows] is added below five new config options - 'instance_failure_recovery_tasks' is a dict of tasks which will recover instance failure. - 'process_failure_recovery_tasks' is a dict of tasks which will recover process failure. - 'host_auto_failure_recovery_tasks' is a dict of tasks which will recover host failure for auto recovery. - 'host_rh_failure_recovery_tasks' is a dict of tasks which will recover host failure for rh recovery on failure host. .. _masakari_6.0.0_Upgrade Notes: Upgrade Notes ------------- .. releasenotes/notes/wsgi-applications-3ed7d6b89f1a5785.yaml @ b'5bbd78e326e7726229bb94f887f18f8b27bb7a14' - WSGI application script ``masakari-wsgi`` is now available. It allows running the masakari APIs using a WSGI server of choice (for example nginx and uwsgi, apache2 with mod_proxy_uwsgi or gunicorn). The eventlet-based servers are still available, but the WSGI options will allow greater deployment flexibility. .. _masakari_7.0.0: 7.0.0 ===== .. _masakari_7.0.0_Prelude: Prelude ------- .. releasenotes/notes/add-upgrade-check-framework-52268130b25317ab.yaml @ b'7700cdd3bf4c47c97b8587428c352390271692b2' Added new tool ``masakari-status upgrade check``. .. _masakari_7.0.0_New Features: New Features ------------ .. releasenotes/notes/add-upgrade-check-framework-52268130b25317ab.yaml @ b'7700cdd3bf4c47c97b8587428c352390271692b2' - New framework for ``masakari-status upgrade check`` command is added. 
This framework allows adding various checks which can be run before a Masakari upgrade to ensure if the upgrade can be performed safely. .. releasenotes/notes/notifications-in-masakari-f5d79838fc23cb9b.yaml @ b'0616b4bd99270c9b4bb1d1df17fad1cb3a50421b' - Added support to emit event notifications whenever user interacts with Masakari restFul APIs. The emitted notifications are documented at `sample_payloads`_. To enable this feature one should set `driver` config option under the `oslo_messaging_notifications` section as shown below:: [oslo_messaging_notifications] driver = log Note: Possible values are `messaging`, `messagingv2`, `routing`, `log`, `test`, `noop`. Notifications can be completely disabled by setting `driver` value as `noop` .. _`sample_payloads`: https://docs.openstack.org/masakari/latest/#versioned-notifications .. releasenotes/notes/progress-details-recovery-workflows-5b14b7b3f87374f4.yaml @ b'7321ee32fdc47c408a13120b1d76f11859190b2e' - Added support to record the recovery workflow details of the notification which will be returned in a new microversion 1.1 in `GET /notifications/{notification_id}` API. For example, GET /notifications/ response will contain `recovery_workflow_details` parameter as shown here `notification_details`_ Added a new config section in Masakari conf file for configuring the back end to be used by taskflow driver:: [taskflow] # The back end for storing recovery_workflow details of the notification. # (string value) connection = mysql+pymysql://root:admin@127.0.0.1/?charset=utf8 # Where db_name, can be a new database or you can also specify masakari # database. Operator should run `masakari-manage db sync` command to add new db tables required for storing recovery_workflow_details. Note: When you run `masakari-manage db sync`, make sure you have `notification_driver=taskflow_driver` set in masakari.conf. .. 
_`notification_details`: https://developer.openstack.org/api-ref/instance-ha/?expanded=show-notification-details-detail#show-notification-details .. _masakari_7.0.0_Upgrade Notes: Upgrade Notes ------------- .. releasenotes/notes/add-upgrade-check-framework-52268130b25317ab.yaml @ b'7700cdd3bf4c47c97b8587428c352390271692b2' - Operator can now use new CLI tool ``masakari-status upgrade check`` to check if Masakari deployment can be safely upgraded from N-1 to N release. .. _masakari_8.1.2: 8.1.2 ===== .. _masakari_8.1.2_Bug Fixes: Bug Fixes --------- .. releasenotes/notes/bug-1685145-3d93145bfc76c660.yaml @ b'3271a7c7446cb32bd3975b4a0d50aceec1a1228e' - Fixes ``/v1/`` API path which returned 404 ResourceNotFound preventing microversion discovery. `LP#1685145 `__ .. releasenotes/notes/bug-1776385-0bcf0a0b3fad359e.yaml @ b'9280560731c91cfbf52c0afd546534ba113e83e5' - Allows segment description to contain new line characters. `LP#1776385 `__ .. releasenotes/notes/bug-1856164-6601a6e6280eba4d.yaml @ b'bbead85247bdc970555b2579dcd3d4cb3a3ccebb' - Adds ``reserved_host`` to all aggregates of the failing host, instead of just the first one. `LP#1856164 `__ .. releasenotes/notes/bug-1882516-e8dc7fd2b55f065f.yaml @ b'3271a7c7446cb32bd3975b4a0d50aceec1a1228e' - Fixes API microversion reporting to report the latest supported microversion. `LP#1882516 `__ .. releasenotes/notes/compute_search-3da97e69e661a73f.yaml @ b'9eff08c7a8041c28a5f9c5ecb2b9915e7f42ce8c' - Fixes validation of compute host existence from checking hypervisor list to compute service list. Since masakari needs to match nova compute service hostname with the one in pacemaker cluster and added to API for correctly processing hostmonitors failover notifications. .. _masakari_8.1.1: 8.1.1 ===== .. _masakari_8.1.1_Bug Fixes: Bug Fixes --------- .. 
releasenotes/notes/bug-1782517-e4dc70bad9e4e131.yaml @ b'e915ade85ad27a9de70d59377a9a255f21c6c8dd' - Fixes Masakari Engine not to try to stop an already stopped instance and fail with 409 from Nova. `LP#1782517 `__ .. releasenotes/notes/bug-1859406-6b041a26acf6c7f6.yaml @ b'e915ade85ad27a9de70d59377a9a255f21c6c8dd' - Fixes Masakari Engine not to wait for timeout when it's known that the evacuation has failed. `LP#1859406 `__ (This fix has already been included in the first Victoria release, 10.0.0, but it was not mentioned in the release notes previously.) .. releasenotes/notes/fix-endless-periodic-f223845f3044b166.yaml @ b'315113d9749805698003892dbaa42debbc549354' - Fixes an issue where a periodic task in Masakari Engine could loop forever querying Nova API following a failed evacuation. `LP#1897888 `__ .. _masakari_9.1.3: 9.1.3 ===== .. _masakari_9.1.3_Bug Fixes: Bug Fixes --------- .. releasenotes/notes/bug-1932194-2b721860bbc26819.yaml @ b'b965f3a253e7eca08ac765be90b7c1ae3d86369f' - Fixes Masakari API to properly return error codes for invalid requests to the user instead of 500. `LP#1932194 `__ .. _masakari_9.1.2: 9.1.2 ===== .. _masakari_9.1.2_Bug Fixes: Bug Fixes --------- .. releasenotes/notes/bug-1685145-3d93145bfc76c660.yaml @ b'aabdcda98beb49fff1f8ade8bf2fe3c201f4a052' - Fixes ``/v1/`` API path which returned 404 ResourceNotFound preventing microversion discovery. `LP#1685145 `__ .. releasenotes/notes/bug-1776385-0bcf0a0b3fad359e.yaml @ b'b5ecfde6d2bd043188e220cdc88fd96fe8e93a7f' - Allows segment description to contain new line characters. `LP#1776385 `__ .. releasenotes/notes/bug-1856164-6601a6e6280eba4d.yaml @ b'314026ce156af7b4d674d9947e7d648502082893' - Adds ``reserved_host`` to all aggregates of the failing host, instead of just the first one. `LP#1856164 `__ .. releasenotes/notes/bug-1882516-e8dc7fd2b55f065f.yaml @ b'aabdcda98beb49fff1f8ade8bf2fe3c201f4a052' - Fixes API microversion reporting to report the latest supported microversion. 
`LP#1882516 `__ .. releasenotes/notes/compute_search-3da97e69e661a73f.yaml @ b'35519c0ce02092aaef6b802817b74d017a84b08b' - Fixes validation of compute host existence from checking hypervisor list to compute service list. Since masakari needs to match nova compute service hostname with the one in pacemaker cluster and added to API for correctly processing hostmonitors failover notifications. .. _masakari_9.1.1: 9.1.1 ===== .. _masakari_9.1.1_Bug Fixes: Bug Fixes --------- .. releasenotes/notes/bug-1782517-e4dc70bad9e4e131.yaml @ b'458698ff74205af29c22fd7f20f870916eaf1b16' - Fixes Masakari Engine not to try to stop an already stopped instance and fail with 409 from Nova. `LP#1782517 `__ .. releasenotes/notes/bug-1859406-6b041a26acf6c7f6.yaml @ b'458698ff74205af29c22fd7f20f870916eaf1b16' - Fixes Masakari Engine not to wait for timeout when it's known that the evacuation has failed. `LP#1859406 `__ (This fix has already been included in the first Victoria release, 10.0.0, but it was not mentioned in the release notes previously.) .. releasenotes/notes/fix-endless-periodic-f223845f3044b166.yaml @ b'322baaa4c23cb7738a9c8260a333607eb20b584c' - Fixes an issue where a periodic task in Masakari Engine could loop forever querying Nova API following a failed evacuation. `LP#1897888 `__ .. _masakari_9.0.0: 9.0.0 ===== .. _masakari_9.0.0_Upgrade Notes: Upgrade Notes ------------- .. releasenotes/notes/drop-py-2-7-059d3cd5e7cb4e1a.yaml @ b'd46ea6b11032eb4628d8cf9c7533e12d804a8004' - Python 2.7 support has been dropped. Last release of Masakari to support python 2.7 is OpenStack Train. The minimum version of Python now supported by Masakari is Python 3.6. .. _masakari_10.0.3: 10.0.3 ====== .. _masakari_10.0.3_Bug Fixes: Bug Fixes --------- .. releasenotes/notes/bug-1932194-2b721860bbc26819.yaml @ b'9a11cd3d6342d576b9221a1e4f49abdf05ae77a8' - Fixes Masakari API to properly return error codes for invalid requests to the user instead of 500. `LP#1932194 `__ .. 
_masakari_10.0.2: 10.0.2 ====== .. _masakari_10.0.2_Bug Fixes: Bug Fixes --------- .. releasenotes/notes/bug-1685145-3d93145bfc76c660.yaml @ b'64db0fe87c6d97978c46033212df0488ac7cfed9' - Fixes ``/v1/`` API path which returned 404 ResourceNotFound preventing microversion discovery. `LP#1685145 `__ .. releasenotes/notes/bug-1776385-0bcf0a0b3fad359e.yaml @ b'9af584c5c92adaf9b172351ada9bfaed24d4ef36' - Allows segment description to contain new line characters. `LP#1776385 `__ .. releasenotes/notes/bug-1856164-6601a6e6280eba4d.yaml @ b'b6e29161be481f84334cdda8ca5b561bd4763e9d' - Adds ``reserved_host`` to all aggregates of the failing host, instead of just the first one. `LP#1856164 `__ .. releasenotes/notes/bug-1882516-e8dc7fd2b55f065f.yaml @ b'64db0fe87c6d97978c46033212df0488ac7cfed9' - Fixes API microversion reporting to report the latest supported microversion. `LP#1882516 `__ .. _masakari_10.0.1: 10.0.1 ====== .. _masakari_10.0.1_Bug Fixes: Bug Fixes --------- .. releasenotes/notes/bug-1782517-e4dc70bad9e4e131.yaml @ b'da2a5f01534d228311f5d5fa7b31c9917870a563' - Fixes Masakari Engine not to try to stop an already stopped instance and fail with 409 from Nova. `LP#1782517 `__ .. releasenotes/notes/bug-1859406-6b041a26acf6c7f6.yaml @ b'da2a5f01534d228311f5d5fa7b31c9917870a563' - Fixes Masakari Engine not to wait for timeout when it's known that the evacuation has failed. `LP#1859406 `__ (This fix has already been included in the first Victoria release, 10.0.0, but it was not mentioned in the release notes previously.) .. releasenotes/notes/fix-endless-periodic-f223845f3044b166.yaml @ b'802d88b28a1ebf8966f45a7ff39c686836ba067c' - Fixes an issue where a periodic task in Masakari Engine could loop forever querying Nova API following a failed evacuation. `LP#1897888 `__ .. _masakari_10.0.0: 10.0.0 ====== .. _masakari_10.0.0_New Features: New Features ------------ .. 
releasenotes/notes/customisable-ha-enabled-instance-metadata-key-af511ea2aac96690.yaml @ b'4397088da71d3a5d75a61b44f0ec4e0e711cbe97' - Adds ``ha_enabled_instance_metadata_key`` config option to ``host_failure`` and ``instance_failure`` config groups. This option allows operators to override the default ``HA_Enabled`` instance metadata key which controls the behaviour of Masakari towards the instance. This way one can have different keys for different failure types (host vs instance failures). .. _masakari_10.0.0_Bug Fixes: Bug Fixes --------- .. releasenotes/notes/compute_search-3da97e69e661a73f.yaml @ b'4322968b893b242f229912c2b70e3895f0227402' - Fixes validation of compute host existence from checking hypervisor list to compute service list. Since masakari needs to match nova compute service hostname with the one in pacemaker cluster and added to API for correctly processing hostmonitors failover notifications. .. _masakari_11.0.3: 11.0.3 ====== .. _masakari_11.0.3_Bug Fixes: Bug Fixes --------- .. releasenotes/notes/bug-1960619-4c2cc73483bdff86.yaml @ b'e3f6e78847568121529d24d1fd71b166cee965af' - Fixes an issue that could be caused by a user sending a malformed host notification missing host status. Such notification would block the host from being added back from maintenance until manual intervention or notification expiration. `LP#1960619 `__ .. _masakari_11.0.2: 11.0.2 ====== .. _masakari_11.0.2_Bug Fixes: Bug Fixes --------- .. releasenotes/notes/bug-1980736-975ee013e4612062.yaml @ b'1680a7320a36faa463d3e6b098b26f478a324996' - Fixes "Instance stopping fails randomly due to already stopped instances". `LP#1980736 `__ .. _masakari_11.0.1: 11.0.1 ====== .. _masakari_11.0.1_Bug Fixes: Bug Fixes --------- .. releasenotes/notes/bug-1932194-2b721860bbc26819.yaml @ b'e3cbcd25b9a31286901ec3159ecc85637183023c' - Fixes Masakari API to properly return error codes for invalid requests to the user instead of 500. `LP#1932194 `__ .. _masakari_11.0.0: 11.0.0 ====== .. 
_masakari_11.0.0_New Features: New Features ------------ .. releasenotes/notes/blueprint-support-nova-system-scope-policies-c4dbd244dd3fcf1a.yaml @ b'0a5ae8b40267dc353d3a4e3d1754d36e77bf0f06' - Allows to use system-scoped tokens when contacting Nova. `Blueprint support-nova-system-scope-policies `__ .. releasenotes/notes/enabled-to-segment-7e6184feb1e4f818.yaml @ b'fe88eae9cbead079fd4d18cf79890e308bfbc133' - Sometimes, operators want to temporarily disable instance-ha function. This version adds 'enabled' to segment. If the segment 'enabled' value is set False, all notifications of this segment will be ignored and no recovery methods will execute. .. _masakari_11.0.0_Upgrade Notes: Upgrade Notes ------------- .. releasenotes/notes/deprecate-json-formatted-policy-file-57ad537ec19cc7e0.yaml @ b'513c29078ffa52f50bc75528c891eef2e77eda99' - The default value of ``[oslo_policy] policy_file`` config option has been changed from ``policy.json`` to ``policy.yaml``. Operators who are utilizing customized or previously generated static policy JSON files (which are not needed by default), should generate new policy files or convert them in YAML format. Use the `oslopolicy-convert-json-to-yaml `_ tool to convert a JSON to YAML formatted policy file in backward compatible way. .. _masakari_11.0.0_Deprecation Notes: Deprecation Notes ----------------- .. releasenotes/notes/deprecate-json-formatted-policy-file-57ad537ec19cc7e0.yaml @ b'513c29078ffa52f50bc75528c891eef2e77eda99' - Use of JSON policy files was deprecated by the ``oslo.policy`` library during the Victoria development cycle. As a result, this deprecation is being noted in the Wallaby cycle with an anticipated future removal of support by ``oslo.policy``. As such operators will need to convert to YAML policy files. Please see the upgrade notes for details on migration of any custom policy files. .. _masakari_11.0.0_Bug Fixes: Bug Fixes --------- .. 
releasenotes/notes/bug-1685145-3d93145bfc76c660.yaml @ b'30842faeaa52d40f6a00765e797e3a173a15f441' - Fixes ``/v1/`` API path which returned 404 ResourceNotFound preventing microversion discovery. `LP#1685145 `__ .. releasenotes/notes/bug-1776385-0bcf0a0b3fad359e.yaml @ b'd7f47f262799b285080cb08830560891c532a5a1' - Allows segment description to contain new line characters. `LP#1776385 `__ .. releasenotes/notes/bug-1782517-e4dc70bad9e4e131.yaml @ b'7f76081ccf167141be07a1fa1b46b92d2da2cee1' - Fixes Masakari Engine not to try to stop an already stopped instance and fail with 409 from Nova. `LP#1782517 `__ .. releasenotes/notes/bug-1856164-6601a6e6280eba4d.yaml @ b'd1850df674aa3390413ead9b07faa2ba5d7dbea4' - Adds ``reserved_host`` to all aggregates of the failing host, instead of just the first one. `LP#1856164 `__ .. releasenotes/notes/bug-1859406-6b041a26acf6c7f6.yaml @ b'7f76081ccf167141be07a1fa1b46b92d2da2cee1' - Fixes Masakari Engine not to wait for timeout when it's known that the evacuation has failed. `LP#1859406 `__ (This fix has already been included in the first Victoria release, 10.0.0, but it was not mentioned in the release notes previously.) .. releasenotes/notes/bug-1882516-e8dc7fd2b55f065f.yaml @ b'30842faeaa52d40f6a00765e797e3a173a15f441' - Fixes API microversion reporting to report the latest supported microversion. `LP#1882516 `__ .. releasenotes/notes/fix-endless-periodic-f223845f3044b166.yaml @ b'df63714b03cef5906791f14bbf2416beab23ca0a' - Fixes an issue where a periodic task in Masakari Engine could loop forever querying Nova API following a failed evacuation. `LP#1897888 `__ .. _masakari_12.0.3: 12.0.3 ====== .. _masakari_12.0.3_Bug Fixes: Bug Fixes --------- .. releasenotes/notes/fix-notification-stuck-problem-fdb84bad8641384b.yaml @ b'4f6a740c2c40729122446a2766127363799df2b0' - Fixes an issue where failure notification stuck into running status when timeout. `LP#1996835 `__ .. _masakari_12.0.2: 12.0.2 ====== .. 
_masakari_12.0.2_Bug Fixes: Bug Fixes --------- .. releasenotes/notes/bug-1960619-4c2cc73483bdff86.yaml @ b'a5b7564f4453e7ec7df4021a852eb40b0bd631ca' - Fixes an issue that could be caused by a user sending a malformed host notification missing host status. Such notification would block the host from being added back from maintenance until manual intervention or notification expiration. `LP#1960619 `__ .. _masakari_12.0.1: 12.0.1 ====== .. _masakari_12.0.1_Bug Fixes: Bug Fixes --------- .. releasenotes/notes/bug-1980736-975ee013e4612062.yaml @ b'ebfbd6de4357b89d836e9fdb1bb3b3565dce023b' - Fixes "Instance stopping fails randomly due to already stopped instances". `LP#1980736 `__ .. _masakari_12.0.0: 12.0.0 ====== .. _masakari_12.0.0_New Features: New Features ------------ .. releasenotes/notes/compute-disable-reason-9570734c0bb888cf.yaml @ b'c861437b52ca0c415c4239dbf86da25877e0954e' - Nova compute service "disable reason" is now set in case of host or process failure. It can be customised per type of failure via config. .. _masakari_12.0.0_Bug Fixes: Bug Fixes --------- .. releasenotes/notes/bug-1932194-2b721860bbc26819.yaml @ b'5c6fd44504e47ddbec3e0c29d454daeda87f86a4' - Fixes Masakari API to properly return error codes for invalid requests to the user instead of 500. `LP#1932194 `__ .. _masakari_13.0.3: 13.0.3 ====== .. _masakari_13.0.3_Bug Fixes: Bug Fixes --------- .. releasenotes/notes/fix-notification-stuck-problem-fdb84bad8641384b.yaml @ b'9d1b9e3e9bd2d88749eae279d65149301f3db5a9' - Fixes an issue where failure notification stuck into running status when timeout. `LP#1996835 `__ .. _masakari_13.0.2: 13.0.2 ====== .. _masakari_13.0.2_Bug Fixes: Bug Fixes --------- .. releasenotes/notes/bug-1960619-4c2cc73483bdff86.yaml @ b'f844b8f48e75c0853b19324d000a021c5f02d385' - Fixes an issue that could be caused by a user sending a malformed host notification missing host status. 
Such notification would block the host from being added back from maintenance until manual intervention or notification expiration. `LP#1960619 `__ .. _masakari_13.0.1: 13.0.1 ====== .. _masakari_13.0.1_Bug Fixes: Bug Fixes --------- .. releasenotes/notes/bug-1980736-975ee013e4612062.yaml @ b'f2c4aada79b455836ac733c41daea474dc5979e3' - Fixes "Instance stopping fails randomly due to already stopped instances". `LP#1980736 `__ .. _masakari_14.0.2: 14.0.2 ====== .. _masakari_14.0.2_Bug Fixes: Bug Fixes --------- .. releasenotes/notes/fix-notification-stuck-problem-fdb84bad8641384b.yaml @ b'e496aad0ecc84beb3b000019bf45b244e6bf5fa6' - Fixes an issue where failure notification stuck into running status when timeout. `LP#1996835 `__ .. _masakari_14.0.1: 14.0.1 ====== .. _masakari_14.0.1_Bug Fixes: Bug Fixes --------- .. releasenotes/notes/bug-1960619-4c2cc73483bdff86.yaml @ b'c813fa9622d4f41d8de542237368a5f34e6bc371' - Fixes an issue that could be caused by a user sending a malformed host notification missing host status. Such notification would block the host from being added back from maintenance until manual intervention or notification expiration. `LP#1960619 `__ .. _masakari_14.0.0: 14.0.0 ====== .. _masakari_14.0.0_Bug Fixes: Bug Fixes --------- .. releasenotes/notes/bug-1980736-975ee013e4612062.yaml @ b'7241101163550b3cd981262468dc523ccb25cc9e' - Fixes "Instance stopping fails randomly due to already stopped instances". `LP#1980736 `__ .. releasenotes/notes/coordination_for_host_notification-a156ec5a5839a781.yaml @ b'9fef8807cfeb793e838727c227e3aca4fee9aa4d' - Fixes an issue which triggers two recovery workflows for the same host failure. `LP#1961110 `__ It introduces distributed lock for Masakari-api services when handle the concurrent notifications for the same host failure from multiple Masakari-hostmonitor services. To enable coordination, the user needs to set the new configuration option ``[coordination]backend_url``, which specifies the backend. .. 
_masakari_15.0.0: 15.0.0 ====== .. _masakari_15.0.0_Bug Fixes: Bug Fixes --------- .. releasenotes/notes/bug-1960619-4c2cc73483bdff86.yaml @ b'0837d7787c1be35df4b4330dbe6cdd2c160a69ab' - Fixes an issue that could be caused by a user sending a malformed host notification missing host status. Such notification would block the host from being added back from maintenance until manual intervention or notification expiration. `LP#1960619 `__ .. releasenotes/notes/fix-notification-stuck-problem-fdb84bad8641384b.yaml @ b'7ec3edda1ada9c2464d79c84b0fd1d1be22f9336' - Fixes an issue where failure notification stuck into running status when timeout. `LP#1996835 `__ .. _masakari_16.0.0: 16.0.0 ====== .. _masakari_16.0.0_New Features: New Features ------------ .. releasenotes/notes/blueprint-add-vmoves-348fd430aa936721.yaml @ b'17ebad53979ff590ef70c67366d4ea3ab71d0896' - It persists vm moves information for one host failure notification into the database, which could help users to insight the process or result of the host recovery workflow, such as which vms evacuated succeed or failed, and which ones are still evacuating. Added VMove API in microversion 1.3, please refer to .. _`VMoves`: https://docs.openstack.org/api-ref/instance-ha/#vmoves-vmoves .. _masakari_17.0.0: 17.0.0 ====== .. _masakari_17.0.0_Upgrade Notes: Upgrade Notes ------------- .. releasenotes/notes/switch-to-alembic-b438de67c5b22a40.yaml @ b'6823ea5ed1ceb0a4b69bf45f0566962c68d6341f' - The database migration engine has changed from `sqlalchemy-migrate`__ to `alembic`__. For most deployments, this should have minimal to no impact and the switch should be mostly transparent. The main user-facing impact is the change in schema versioning. 
While sqlalchemy-migrate used a linear, integer-based versioning scheme, which required placeholder migrations to allow for potential migration backports, alembic uses a distributed version control-like schema where a migration's ancestor is encoded in the file and branches are possible. The alembic migration files therefore use a arbitrary UUID-like naming scheme and the ``masakari-manage db_sync`` command now expects such an version when manually specifying the version that should be applied. For example:: $ masakari-manage db sync c6214ca60943 Attempting to specify an sqlalchemy-migrate-based version will result in an error. .. __: https://sqlalchemy-migrate.readthedocs.io/en/latest/ .. __: https://alembic.sqlalchemy.org/en/latest/ .. _masakari_19.0.0-3: 19.0.0-3 ======== .. _masakari_19.0.0-3_New Features: New Features ------------ .. releasenotes/notes/add-masakari-wsgi-module-a5f5a649a2ec460c.yaml @ b'0fd34dd6a6d90525dbf806f35577c5ee1d7e9444' - A new module, ``masakari.wsgi``, has been added as a place to gather WSGI ``application`` objects. This is intended to ease deployment by providing a consistent location for these objects. For example, if using uWSGI then instead of: .. code-block:: ini [uwsgi] wsgi-file = /bin/masakari-wsgi You can now use: .. code-block:: ini [uwsgi] module = masakari.wsgi.api:application This also simplifies deployment with other WSGI servers that expect module paths such as gunicorn. 
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1751923813.5177102 masakari-19.1.0.dev18/api-ref/0000775000175100017510000000000015033036146014754 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1751923813.5217102 masakari-19.1.0.dev18/api-ref/source/0000775000175100017510000000000015033036146016254 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/api-ref/source/conf.py0000664000175100017510000001512215033036143017551 0ustar00mylesmyles# Copyright (c) 2017 NTT Data # All Rights Reserved. # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # # masakari documentation build configuration file. # # This file is execfile()d with the current directory set to # its containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import os import sys extensions = [ 'os_api_ref', 'openstackdocstheme', ] html_theme = 'openstackdocs' html_theme_options = { "sidebar_mode": "toc", } # If extensions (or modules to document with autodoc) are in another # directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown # here. 
sys.path.insert(0, os.path.abspath('../../')) sys.path.insert(0, os.path.abspath('../')) sys.path.insert(0, os.path.abspath('./')) # -- General configuration # ---------------------------------------------------- # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. # # source_encoding = 'utf-8' # The master toctree document. master_doc = 'index' # General information about the project. project = 'Masakari API Reference' copyright = '2017-present, OpenStack Foundation' # openstackdocstheme optionns openstackdocs_repo_name = 'openstack/masakari' openstackdocs_bug_project = 'masakari' openstackdocs_auto_name = False # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # # language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: # today = '' # Else, today_fmt is used as the format for a strftime call. # today_fmt = '%B %d, %Y' # The reST default role (used for this markup: `text`) to use # for all documents. # default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. # add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). add_module_names = False # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'native' # -- Options for man page output # ---------------------------------------------- # Grouping the document tree for man pages. 
# List of tuples 'sourcefile', 'target', 'title', 'Authors name', 'manual' # -- Options for HTML output # -------------------------------------------------- # The theme to use for HTML and HTML Help pages. Major themes that come with # Sphinx are currently 'default' and 'sphinxdoc'. # html_theme_path = ["."] # html_theme = '_theme' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. # html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". # html_title = None # A shorter title for the navigation bar. Default is the same as html_title. # html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. # html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. # html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". # html_static_path = ['_static'] # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. # html_use_smartypants = True # Custom sidebar templates, maps document names to template names. # html_sidebars = {} # Additional templates that should be rendered to pages, maps page # names to # template names. # html_additional_pages = {} # If false, no module index is generated. # html_use_modindex = True # If false, no index is generated. # html_use_index = True # If true, the index is split into individual pages for each letter. 
# html_split_index = False # If true, links to the reST sources are added to the pages. # html_show_sourcelink = True # If true, an OpenSearch description file will be output, and all # pages will # contain a tag referring to it. The value of this option must # be the # base URL from which the finished HTML is served. # html_use_opensearch = '' # If nonempty, this is the file name suffix for HTML files (e.g. # ".xhtml"). # html_file_suffix = '' # Output file base name for HTML help builder. htmlhelp_basename = 'masakaridoc' # -- Options for LaTeX output # ------------------------------------------------- # The paper size ('letter' or 'a4'). # latex_paper_size = 'letter' # The font size ('10pt', '11pt' or '12pt'). # latex_font_size = '10pt' # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass # [howto/manual]). latex_documents = [('index', 'Masakari.tex', 'OpenStack Masakari API Documentation', 'OpenStack Foundation', 'manual'), ] # The name of an image file (relative to this directory) to place at # the top of # the title page. # latex_logo = None # For "manual" documents, if this is true, then toplevel headings are # parts, # not chapters. # latex_use_parts = False # Additional stuff for the LaTeX preamble. # latex_preamble = '' # Documents to append as an appendix to all manuals. # latex_appendices = [] # If false, no module index is generated. # latex_use_modindex = True ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/api-ref/source/failover-segments.inc0000664000175100017510000001723315033036143022404 0ustar00mylesmyles.. -*- rst -*- ============================ FailoverSegments (segments) ============================ **Segments** System can be zoned from top to down levels, into Regions, Availability Zones and Host Aggregates (or Cells). Within those zones, one or more pacemaker/pacemaker-remote clusters may exist. 
In addition to those boundaries, shared storage boundary is also important to decide the optimal host for fail-over. Openstack zoned boundaries (such as Regions, AZ, Host Aggregates, etc..) can be managed by the nova scheduler. However, shared storage boundaries are difficult to manage. Moreover, the operator may want to use other types of boundary such as rack layout and powering. Therefore, operator may want to define the segment of hypervisor hosts and assign the failover host/hosts for each of them. Those segment can be define based on the shared storage boundaries or any other limitations may critical for selection of the failover host. Lists, creates, shows details for, updates, and deletes segments. List FailoverSegments ===================== .. rest_method:: GET /segments Lists IDs, names, description, recovery_method, service_type, enabled for all segments. Segments contains `service_type`, `recovery_method` and `enabled` attributes. `service_type` attribute indicates for which service (e.g. compute, cinder etc) this segment belongs to. `recovery_method` attribute indicates the recovery action to be followed when any host in a segment goes down. `enabled` attribute indicates whether notifications which belong to this segment will be dealt with. The possible `recovery_method` values are: - ``auto``. Auto recovery action. - ``reserved_host``. Reserved host recovery action. - ``auto_priority``. First executes auto and if auto fails then retried with reserved host recovery action. - ``rh_priority``. First executes reserved host and if it fails then retried with auto recovery action. You can filter on the `service_type`, `recovery_method` and `enabled` when you complete a list segments request. Response Codes -------------- .. rest_status_code:: success status.yaml - 200 .. rest_status_code:: error status.yaml - 400 - 401 - 403 - 404 Request ------- .. 
rest_parameters:: parameters.yaml - limit: limit - marker: marker - recovery_method: recovery_method_query_segment - service_type: service_type_query_segment - enabled: segment_enabled - sort_dir: sort_dir - sort_key: sort_key_segment Response -------- .. rest_parameters:: parameters.yaml - segments: segments - name: segment_name - uuid: segment_uuid **Example List Segments** .. literalinclude:: ../../doc/api_samples/segments/segments-list-resp.json :language: javascript Create Segment ============== .. rest_method:: POST /segments Creates a segment. Creates a FailoverSegment with name, description, service_type, enabled and recovery_method. For `service_type` user can mention the name of service for which this segment is created. As of now user can mention `COMPUTE` as `service_type`. For `recovery_method` user can mention either `auto`, `reserved_host`, `auto_priority` or `rh_priority`. Segment name should be unique. For `enabled` user can mention `true` or `false` to enable/disable this segment. Response Codes -------------- .. rest_status_code:: success status.yaml - 202 .. rest_status_code:: error status.yaml - 400 - 401 - 403 - 409 .. A conflict(409) is returned if segment with same name is already present. Request ------- .. rest_parameters:: parameters.yaml - segment: segment - description: segment_description - name: segment_name - recovery_method: segment_recovery_method - service_type: segment_service_type - enabled: segment_enabled **Example Create Segment** .. literalinclude:: ../../doc/api_samples/segments/segment-create-req.json :language: javascript Response -------- .. rest_parameters:: parameters.yaml - segment: segment - created: created - description: segment_description - id: segment_id - name: segment_name - recovery_method: segment_recovery_method - service_type: segment_service_type - enabled: segment_enabled - updated: updated - uuid: segment_uuid **Example Create Segment** .. 
literalinclude:: ../../doc/api_samples/segments/segment-create-resp.json :language: javascript Show Segment Details ==================== .. rest_method:: GET /segments/{segment_id} Shows details for a segment. **Preconditions** The segment must exist. Response Codes -------------- .. rest_status_code:: success status.yaml - 200 .. rest_status_code:: error status.yaml - 401 - 403 - 404 Request ------- .. rest_parameters:: parameters.yaml - segment_id: segment_id_path Response -------- .. rest_parameters:: parameters.yaml - segment: segment - created: created - description: segment_description - id: segment_id - name: segment_name - recovery_method: segment_recovery_method - service_type: segment_service_type - enabled: segment_enabled - updated: updated - uuid: segment_uuid **Example Show Segment Details** .. literalinclude:: ../../doc/api_samples/segments/segment-get-resp.json :language: javascript Update Segment ============== .. rest_method:: PUT /segments/{segment_id} Updates the editable attributes of an existing segment. **Preconditions** - The segment must exist. - User can not update segment if any host from the segment has any usage in the notification table i.e. any host from the failover segment has notification status as new/error/running. Response Codes -------------- .. rest_status_code:: success status.yaml - 200 .. rest_status_code:: error status.yaml - 400 - 401 - 403 - 404 - 409 .. A conflict(409) is returned if user tries to update segment name which is already assigned to segment or if any host from the segment has any usage in the notification table i.e. any host from the failover segment has notification status as new/error/running. Request ------- .. rest_parameters:: parameters.yaml - segment_id: segment_id_path - description: segment_description - name: segment_name - recovery_method: segment_recovery_method - service_type: segment_service_type - enabled: segment_enabled **Example Update segment name** .. 
literalinclude:: ../../doc/api_samples/segments/segment-update-req.json :language: javascript Response -------- .. rest_parameters:: parameters.yaml - segment: segment - created: created - description: segment_description - id: segment_id - name: segment_name - recovery_method: segment_recovery_method - service_type: segment_service_type - enabled: segment_enabled - updated: updated - uuid: segment_uuid **Example Update Segment name** .. literalinclude:: ../../doc/api_samples/segments/segment-update-resp.json :language: javascript Delete Segment ============== .. rest_method:: DELETE /segments/{segment_id} Deletes a segment. **Preconditions** - The segment must exist. - User can not delete segment if any host from the segment has any usage in the notification table i.e. any host from the failover segment has notification status as new/error/running. Response Codes -------------- .. rest_status_code:: success status.yaml - 204 .. rest_status_code:: error status.yaml - 400 - 401 - 403 - 404 - 409 .. A conflict(409) is returned if user tries to delete the segment if any host from the segment has any usage in the notification table i.e. any host from the failover segment has notification status as new/error/running. Request ------- .. rest_parameters:: parameters.yaml - segment_id: segment_id_path Response -------- There is no body content for the response of a successful DELETE query. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/api-ref/source/hosts.inc0000664000175100017510000001566515033036143020121 0ustar00mylesmyles.. -*- rst -*- ============== Hosts (hosts) ============== **Hosts** A host belongs to segment. Host can be any kind of virtual machine which can have compute service running on it. Lists, creates, shows details for, updates, and deletes hosts. List Hosts ========== .. rest_method:: GET /segments/{segment_id}/hosts Lists IDs, names, type, reserved, on_maintenance for all hosts. 
You can filter on the type, on_maintenance and reserved when you complete a list hosts request. **Preconditions** The segment must exist. Response Codes -------------- .. rest_status_code:: success status.yaml - 200 .. rest_status_code:: error status.yaml - 400 - 401 - 403 - 404 Request ------- .. rest_parameters:: parameters.yaml - segment_id: segment_id_path - limit: limit - marker: marker - on_maintenance: on_maintenance_query_host - reserved: reserved_query_host - sort_dir: sort_dir - sort_key: sort_key_host - type: type_query_host Response -------- .. rest_parameters:: parameters.yaml - hosts: hosts - name: host_name - uuid: host_uuid - failover_segment_id: segment_uuid - deleted: deleted - on_maintenance: on_maintenance - reserved: reserved - created_at: created - control_attributes: control_attributes - updated_at: updated - failover_segment: segment - type: host_type - id: host_id **Example List hosts** .. literalinclude:: ../../doc/api_samples/hosts/hosts-list-resp.json :language: javascript Create Host =========== .. rest_method:: POST /segments/{segment_id}/hosts Creates a host under given segment. Creates a Host under given segment with name, type, control_attributes. User can set sepcific hosts as reserved by setting reserved attribute to True. By default `on_maintenance` mode which indicates whether host is under maintenance or not is False when host is created. Host name should be equal to nova-compute host name from nova service list and host name from the corosync cluster. **Preconditions** The segment must exist. Response Codes -------------- .. rest_status_code:: success status.yaml - 201 .. rest_status_code:: error status.yaml - 400 - 401 - 403 - 404 - 409 .. A conflict(409) is returned if host with same name is already present under given segment. BadRequest (400) is returned if host doesn't exists in nova. Request ------- .. 
rest_parameters:: parameters.yaml - segment_id: segment_id_path - host: host - type: host_type - name: host_name - control_attributes: control_attributes - reserved: reserved - on_maintenance: on_maintenance **Example Create Host** .. literalinclude:: ../../doc/api_samples/hosts/host-create-req.json :language: javascript Response -------- .. rest_parameters:: parameters.yaml - host: host - name: host_name - uuid: host_uuid - failover_segment_id: segment_uuid - deleted: deleted - on_maintenance: on_maintenance - reserved: reserved - created_at: created - control_attributes: control_attributes - updated_at: updated - failover_segment: segment - type: host_type - id: host_id **Example Create Host** .. literalinclude:: ../../doc/api_samples/hosts/host-create-resp.json :language: javascript Show Host Details ================= .. rest_method:: GET /segments/{segment_id}/hosts/{host_id} Shows details for a host. **Preconditions** The segment must exist. The host must exist. Response Codes -------------- .. rest_status_code:: success status.yaml - 200 .. rest_status_code:: error status.yaml - 401 - 403 - 404 Request ------- .. rest_parameters:: parameters.yaml - segment_id: segment_id_path - host_id: host_id_path Response -------- .. rest_parameters:: parameters.yaml - host: host - name: host_name - uuid: host_uuid - failover_segment_id: segment_uuid - deleted: deleted - on_maintenance: on_maintenance - reserved: reserved - created_at: created - control_attributes: control_attributes - updated_at: updated - failover_segment: segment - type: host_type - id: host_id **Example Show Host Details** .. literalinclude:: ../../doc/api_samples/hosts/host-get-resp.json :language: javascript Update Host =========== .. rest_method:: PUT /segments/{segment_id}/hosts/{host_id} Updates the editable attributes of an existing host. **Preconditions** - The segment must exist. - The host must exist. 
- User can not update host if that host or any host from the failover segment has any usage in the notification table i.e. any host from the failover segment has notification status as new/error/running. Response Codes -------------- .. rest_status_code:: success status.yaml - 200 .. rest_status_code:: error status.yaml - 400 - 401 - 403 - 404 - 409 .. A conflict(409) is returned if user tries to update host name which is already assigned to host under given segment or user tries to update the host or any other host from the failover segment has any usage in the notification table i.e. any host from the failover segment has notification status as new/error/running. BadRequest (400) is returned if host doesn't exists in nova. Request ------- .. rest_parameters:: parameters.yaml - segment_id: segment_id_path - host_id: host_id_path - type: host_type - name: segment_name - on_maintenance: on_maintenance - reserved: reserved **Example Update host reserved flag** .. literalinclude:: ../../doc/api_samples/hosts/host-update-req.json :language: javascript Response -------- .. rest_parameters:: parameters.yaml - host: host - name: host_name - uuid: host_uuid - failover_segment_id: segment_uuid - deleted: deleted - on_maintenance: on_maintenance - reserved: reserved - created_at: created - control_attributes: control_attributes - updated_at: updated - failover_segment: segment - type: host_type - id: host_id **Example Update host reserved flag** .. literalinclude:: ../../doc/api_samples/hosts/host-update-resp.json :language: javascript Delete Host =========== .. rest_method:: DELETE /segments/{segment_id}/hosts/{host_id} Deletes a host from given segment. **Preconditions** - The segment must exist. - The host must exist. - User can not delete host if that host or any host from the failover segment has any usage in the notification table i.e. any host from the failover segment has notification status as new/error/running. Response Codes -------------- .. 
rest_status_code:: success status.yaml - 204 .. rest_status_code:: error status.yaml - 400 - 401 - 403 - 404 - 409 .. A conflict(409) is returned if user tries to delete the host or any other host from the failover segment has any usage in the notification table i.e. any host from the failover segment has notification status as new/error/running. Request ------- .. rest_parameters:: parameters.yaml - segment_id: segment_id_path - host_id: host_id_path Response -------- There is no body content for the response of a successful DELETE query. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/api-ref/source/index.rst0000664000175100017510000000047315033036143020116 0ustar00mylesmyles:tocdepth: 2 ============== Masakari API ============== This is a reference for the OpenStack Masakari API which is provided by the Masakari project. .. rest_expand_all:: .. include:: versions.inc .. include:: failover-segments.inc .. include:: hosts.inc .. include:: notifications.inc .. include:: vmoves.inc ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/api-ref/source/notifications.inc0000664000175100017510000001246115033036143021621 0ustar00mylesmyles.. -*- rst -*- ============================== Notifications (notifications) ============================== **Notifications** A notification is a kind of alert provided by monitoring services (masakari-monitors) for failure of either host, process or instance. Lists, creates and shows details for notifications. List Notifications ================== .. rest_method:: GET /notifications Lists IDs, notification types, host_name, generated_time, payload and status for all notifications. Notifications contain a `status` attribute that indicates the current notification state. You can filter on the notification `status` when you complete a list notifications request. 
The notification `status` is returned in the response body. The possible notification `status` values are: - ``new``. The notification is in new state and yet to be processed. - ``running``. The notification is in progress. - ``finished``. The notification is completed successfully. - ``error``. The notification is ended up in error. - ``failed``. The notification is not processed successfully after failed once. - ``ignored``. The notification is ignored by masakari engine. You can also filter on the basis of `source_host_uuid`, `generated_since` and `type` when you complete a list notifications request. Response Codes -------------- .. rest_status_code:: success status.yaml - 200 .. rest_status_code:: error status.yaml - 400 - 401 - 403 - 404 Request ------- .. rest_parameters:: parameters.yaml - generated_since: generated_since_query_notifications - limit: limit - marker: marker - sort_dir: sort_dir - sort_key: sort_key_notification - source_host_uuid: source_host_uuid_query_notifications - type: type_query_notifications Response -------- .. rest_parameters:: parameters.yaml - notifications: notifications - notification_uuid: notification_uuid - deleted: deleted - created_at: created - updated_at: updated - status: notification_status - uuid: notification_uuid - source_host_uuid: source_host_uuid - generated_time: generated_time - type: notification_type - payload: notification_payload - id: notification_id **Example List Notifications** .. literalinclude:: ../../doc/api_samples/notifications/notifcations-list-resp.json :language: javascript Create Notification =================== .. rest_method:: POST /notifications Creates a notification. Response Codes -------------- .. rest_status_code:: success status.yaml - 202 .. rest_status_code:: error status.yaml - 400 - 401 - 403 - 409 .. A conflict(409) is returned if notification with same payload is exists or host for which notification is generated is under maintenance. 
BadRequest (400) is returned if notification payload is incorrect. Request ------- .. rest_parameters:: parameters.yaml - notification: notification - type: notification_type - generated_time: generated_time - payload: notification_payload - host_name: notification_host_name **Example create Process failure notification** .. literalinclude:: ../../doc/api_samples/notifications/process-notification-create-req.json :language: javascript **Example create VM failure notification** .. literalinclude:: ../../doc/api_samples/notifications/vm-notification-create-req.json :language: javascript **Example create COMPUTE_HOST failure notification** .. literalinclude:: ../../doc/api_samples/notifications/host-notification-create-req.json :language: javascript Response -------- .. rest_parameters:: parameters.yaml - notification: notification - type: notification_type - generated_time: generated_time - payload: notification_payload - source_host_uuid: source_host_uuid - uuid: notification_uuid - deleted: deleted - created_at: created - status: notification_status - updated_at: updated - id: notification_id **Example create Process failure notification** .. literalinclude:: ../../doc/api_samples/notifications/process-notification-create-resp.json :language: javascript **Example create VM failure notification** .. literalinclude:: ../../doc/api_samples/notifications/vm-notification-create-resp.json :language: javascript **Example create COMPUTE_HOST failure notification** .. literalinclude:: ../../doc/api_samples/notifications/host-notification-create-resp.json :language: javascript Show Notification Details ========================= .. rest_method:: GET /notifications/{notification_id} Shows details for a notification. **Preconditions** The notification must exist. Response Codes -------------- .. rest_status_code:: success status.yaml - 200 .. rest_status_code:: error status.yaml - 401 - 403 - 404 Request ------- .. 
rest_parameters:: parameters.yaml - notification_id: notification_id_path Response -------- .. rest_parameters:: parameters.yaml - notification: notification - type: notification_type - generated_time: generated_time - payload: notification_payload - source_host_uuid: source_host_uuid - uuid: notification_uuid - deleted: deleted - created_at: created - status: notification_status - updated_at: updated - recovery_workflow_details: recovery_workflow_details - id: notification_id **Example Show Notification Details** .. literalinclude:: ../../doc/api_samples/notifications/notification-get-resp.json :language: javascript ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/api-ref/source/parameters.yaml0000664000175100017510000003053515033036143021306 0ustar00mylesmyles--- # variables in path api_version: in: path required: true type: string description: > The API version as returned in the links from the ``GET /`` call. host_id_path: description: | The UUID of the host. in: path required: true type: string notification_id_path: description: | The UUID of the notification. in: path required: true type: string segment_id_path: description: | The UUID of the segment. in: path required: true type: string vmove_id_path: description: | The UUID of the vmove. in: path required: true type: string # variables in query generated_since_query_notifications: description: | Filter the notifications list result by notification generated time. in: query required: false type: string limit: description: | Requests a page size of items. Returns a number of items up to a limit value. Use the ``limit`` parameter to make an initial limited request and use the ID of the last-seen item from the response as the ``marker`` parameter value in a subsequent limited request. in: query required: false type: integer marker: description: | The ID of the last-seen item. 
Use the ``limit`` parameter to make an initial limited request and use the ID of the last-seen item from the response as the ``marker`` parameter value in a subsequent limited request. in: query required: false type: string on_maintenance_query_host: description: | Filter the host list result by on_maintenance. in: query required: false type: boolean recovery_method_query_segment: description: | Filter the segment list result by recovery_method. in: query required: false type: string reserved_query_host: description: | Filter the host list result by reserved flag. in: query required: false type: boolean service_type_query_segment: description: | Filter the segment list result by service_type. in: query required: false type: string sort_dir: description: | Sort direction. A valid value is ``asc`` (ascending) or ``desc`` (descending). Default is ``desc``. You can specify multiple pairs of sort key and sort direction query parameters. If you omit the sort direction in a pair, the API uses the natural sorting direction of the direction of the segment ``sort_key`` attribute. in: query required: false type: string sort_key_host: description: | Sorts by a hosts attribute. Default attribute is ``created_at``. You can specify multiple pairs of sort key and sort direction query parameters. If you omit the sort direction in a pair, the API uses the natural sorting direction of the segment ``sort_key`` attribute. The sort keys are limited to: - ``created_at`` - ``type`` - ``name`` - ``updated_at`` - ``uuid`` - ``reserved`` - ``on_maintenance`` in: query required: false type: string sort_key_notification: description: | Sorts by a notification attribute. Default attribute is ``created_at``. You can specify multiple pairs of sort key and sort direction query parameters. If you omit the sort direction in a pair, the API uses the natural sorting direction of the segment ``sort_key`` attribute. 
The sort keys are limited to: - ``created_at`` - ``type`` - ``generated_time`` - ``updated_at`` - ``uuid`` - ``payload`` - ``status`` - ``source_host_uuid`` in: query required: false type: string sort_key_segment: description: | Sorts by a segment attribute. Default attribute is ``created_at``. You can specify multiple pairs of sort key and sort direction query parameters. If you omit the sort direction in a pair, the API uses the natural sorting direction of the segment ``sort_key`` attribute. The sort keys are limited to: - ``created_at`` - ``description`` - ``name`` - ``updated_at`` - ``uuid`` - ``recovery_method`` - ``service_type`` in: query required: false type: string sort_key_vmove: description: | Sorts by a vmove attribute. Default attribute is ``created_at``. You can specify multiple pairs of sort key and sort direction query parameters. If you omit the sort direction in a pair, the API uses the natural sorting direction of the vmove ``sort_key`` attribute. The sort keys are limited to: - ``created_at`` - ``updated_at`` - ``uuid`` - ``start_time`` - ``type`` - ``status`` in: query required: false type: string source_host_uuid_query_notifications: description: | Filter the notifications list result by source_host_uuid. in: query required: false type: string status_query_vmove: description: | Filter the vmoves list result by vmove status. in: query required: false type: string type_query_host: description: | Filter the host list result by type of host. in: query required: false type: boolean type_query_notifications: description: | Filter the notifications list result by notification type. in: query required: false type: string type_query_vmove: description: | Filter the vmoves list result by vmove type. in: query required: false type: string # variables in body control_attributes: description: | Attributes to control host. in: body required: true type: string created: description: | The date and time when the resource was created. 
The date and time stamp format is `ISO 8601 `_ :: CCYY-MM-DDThh:mm:ss±hh:mm For example, ``2017-04-21T09:49:58-05:00``. The ``±hh:mm`` value, if included, is the time zone as an offset from UTC. In the previous example, the offset value is ``-05:00``. in: body required: true type: string deleted: description: | A boolean indicates whether this resource is deleted or not, if it has not been deleted, ``false`` will appear. in: body required: true type: boolean generated_time: description: | The date and time when the notification was created. The date and time stamp format is `ISO 8601 `_ :: CCYY-MM-DDThh:mm:ss±hh:mm For example, ``2017-04-21T09:49:58-05:00``. The ``±hh:mm`` value, if included, is the time zone as an offset from UTC. In the previous example, the offset value is ``-05:00``. in: body required: true type: string host: description: | A ``host`` object. in: body required: true type: object host_id: description: | ID of host. in: body required: true type: string host_name: description: | The host name. in: body required: true type: string host_type: description: | Type of host. in: body required: true type: string host_uuid: description: | The UUID of the host. in: body required: true type: string hosts: description: | A list of ``host`` objects. in: body required: true type: array links: description: | Links to the resources in question. in: body required: true type: array notification: description: | A ``notification`` object. in: body required: true type: object notification_host_name: description: | A name of host for which notification is created. in: body required: true type: object notification_id: description: | ID of notification. in: body required: true type: string notification_payload: description: | Payload for notification. .. note:: This is a JSON string. in: body required: true type: string notification_status: description: | The notification status. 
in: body required: true type: string notification_type: description: | Type of notification, can be either ``PROCESS``, ``COMPUTE_HOST`` or ``VM``. in: body required: true type: string notification_uuid: description: | The UUID of the notification. in: body required: true type: string notifications: description: | A list of ``notification`` objects. in: body required: true type: array on_maintenance: description: | A boolean indicates whether this host is on maintenance or not, if it is not on maintenance mode, ``false`` will appear. in: body required: false type: boolean recovery_workflow_details: description: | Recovery workflow details of the notification. This is a list of dictionary. ``New in version 1.1`` in: body required: true type: array reserved: description: | A boolean indicates whether this host is reserved or not, if it is not reserved, ``false`` will appear. in: body required: false type: boolean segment: description: | A ``segment`` object. in: body required: true type: object segment_description: type: string in: body required: false description: | A free form description of the segment. Limited to 255 characters in length. segment_enabled: type: boolean in: body required: false description: | Boolean whether this segment is enabled or not. segment_id: description: | The Id of the segment. in: body required: true type: string segment_name: description: | The segment name. in: body required: true type: string segment_recovery_method: type: string in: body required: true description: | Type of recovery if any host in this segment goes down. User can mention either 'auto', 'reserved_host', 'auto_priority' or 'rh_priority'. segment_service_type: type: string in: body required: true description: | The name of service which will be deployed in this segment. As of now user can mention 'COMPUTE' as service_type. segment_uuid: description: | The UUID of the segment. in: body required: true type: string segments: description: | A list of ``segment`` objects. 
in: body required: true type: array source_host_uuid: description: | The UUID of host for which notification is generated. in: body required: true type: string updated: description: | The date and time when the resource was updated. The date and time stamp format is `ISO 8601 `_ :: CCYY-MM-DDThh:mm:ss±hh:mm For example, ``2017-04-21T09:49:58-05:00``. The ``±hh:mm`` value, if included, is the time zone as an offset from UTC. In the previous example, the offset value is ``-05:00``. in: body required: true type: string version: description: | The version. in: body required: true type: string version_id: type: string in: body required: true description: > A common name for the version in question. Informative only, it has no real semantic meaning. version_max: type: string in: body required: true description: > The maximum version supported by API. version_min: type: string in: body required: true description: > The minimum version supported by API. version_status: type: string in: body required: true description: | The status of this API version. This can be one of: - ``CURRENT``: this is the preferred version of the API to use - ``SUPPORTED``: this is an older, but still supported version of the API - ``DEPRECATED``: a deprecated version of the API that is slated for removal versions: type: array in: body required: true description: > A list of version objects that describe the API versions available. vmove: description: | A ``vmove`` object. in: body required: true type: object vmove_end_time: description: | The date and time when the vmove ended. in: body required: true type: string vmove_id: description: | ID of vmove. in: body required: true type: string vmove_instance_name: description: | The name of the instance. in: body required: true type: string vmove_instance_uuid: description: | The UUID of the instance. in: body required: true type: string vmove_message: description: | The vmove message info. 
in: body required: true type: string vmove_start_time: description: | The date and time when the vmove started. in: body required: true type: string vmove_status: description: | The vmove status. in: body required: true type: string vmove_type: description: | The vmove type. in: body required: true type: string vmove_uuid: description: | The UUID of the vmove. in: body required: true type: string vmoves: description: | A list of ``vmove`` objects. in: body required: true type: array ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/api-ref/source/status.yaml0000664000175100017510000000340715033036143020464 0ustar00mylesmyles--- ################# # Success Codes # ################# 200: default: | Request was successful. 201: default: | Resource was created and is ready to use. 202: default: | Request was accepted for processing, but the processing has not been completed. A 'location' header is included in the response which contains a link to check the progress of the request. 204: default: | The server has fulfilled the request by deleting the resource. 300: default: | There are multiple choices for resources. The request has to be more specific to successfully retrieve one of these resources. 302: default: | The response is about a redirection hint. The header of the response usually contains a 'location' value where requesters can check to track the real location of the resource. ################# # Error Codes # ################# 400: default: | Some content in the request was invalid. resource_signal: | The target resource doesn't support receiving a signal. 401: default: | User must authenticate before making a request. 403: default: | Policy does not allow current user to do this operation. 404: default: | The requested resource could not be found. 405: default: | Method is not valid for this endpoint. 409: default: | This operation conflicted with another operation on this resource. 
duplicate_zone: | There is already a zone with this name. 500: default: | Something went wrong inside the service. This should not happen usually. If it does happen, it means the server has experienced some serious problems. 503: default: | Service is not available. This is mostly caused by service configuration errors which prevents the service from successful start up. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/api-ref/source/versions.inc0000664000175100017510000000422015033036143020612 0ustar00mylesmyles.. -*- rst -*- ============== API Versions ============== In order to bring new features to users over time, the Masakari API supports versioning. - ''major versions'', which have dedicated urls The Version APIs work differently from other APIs as they *do not* require authentication. List All Major Versions ======================= .. rest_method:: GET / This fetches all the information about all known major API versions in the deployment. Links to more specific information will be provided for each API version. Response Codes -------------- .. rest_status_code:: success status.yaml - 200 Response -------- .. rest_parameters:: parameters.yaml - versions: versions - id: version_id - status: version_status - links: links - version: version_max - min_version: version_min .. note:: The ``updated`` parameter in the response is vestigial and provides no useful information. Response Example ---------------- This demonstrates the expected response from a bleeding edge server that supports up to the current version. .. literalinclude:: /../../doc/api_samples/versions/versions-get-resp.json :language: javascript Show Details of Specific API Version ==================================== .. rest_method:: GET /{api_version}/ This gets the details of a specific API at its root. Nearly all this information exists at the API root, so this is mostly a redundant operation. Response Codes -------------- .. 
rest_status_code:: success status.yaml - 200 Request ------- .. rest_parameters:: parameters.yaml - api_version: api_version Response -------- .. rest_parameters:: parameters.yaml - version: version - id: version_id - status: version_status - links: links - version: version_max - min_version: version_min .. note:: The ``updated`` and ``media-types`` parameters in the response are vestigial and provide no useful information. They will probably be deprecated and removed in the future. Response Example ---------------- This is an example of a ``GET /v1/`` on a relatively current server. .. literalinclude:: /../../doc/api_samples/versions/v1-version-get-resp.json :language: javascript ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/api-ref/source/vmoves.inc0000664000175100017510000000643415033036143020272 0ustar00mylesmyles.. -*- rst -*- ================ VMoves (vmoves) ================ **VMoves** A vmove belongs to one host failure notification. Lists, shows details for vmoves. List VMoves ============ .. rest_method:: GET /notification/{notification_id}/vmoves Lists IDs, notification_id, instance_id, source_host, dest_host, start_time, end_time, status and type for all VM moves. Vmoves contain a `type` attribute that indicates the current vmove type. The possible vmove `type` values are: - ``evacuation``. The vmove is one evacuation. - ``migration``. The vmove is one migration. - ``live_migration``. The vmove is one live_migration. Vmoves contain a `status` attribute that indicates the current vmove state. The possible vmove `status` values are: - ``pending``. The vmove is in pending state and yet to be processed. - ``ongoing``. The vmove is in progress. - ``succeeded``. The vmove is processed successfully. - ``failed``. The vmove is processed failed. - ``ignored``. The vmove is ignored for some reason. You can filter on the `type` and `status` when you complete a list vmoves request. 
**Preconditions** The notification must exist. Response Codes -------------- .. rest_status_code:: success status.yaml - 200 .. rest_status_code:: error status.yaml - 400 - 401 - 403 - 404 .. BadRequest (400) is returned if the notification type is not `COMPUTE_NODE`. Request ------- .. rest_parameters:: parameters.yaml - notification_id: notification_id_path - limit: limit - marker: marker - sort_dir: sort_dir - sort_key: sort_key_vmove - status: status_query_vmove - type: type_query_vmove Response -------- .. rest_parameters:: parameters.yaml - vmoves: vmoves - created_at: created - updated_at: updated - deleted: deleted - id: vmove_id - uuid: vmove_uuid - notification_uuid: notification_uuid - instance_uuid: vmove_instance_uuid - instance_name: vmove_instance_name - source_host: host_name - dest_host: host_name - start_time: vmove_start_time - end_time: vmove_end_time - status: vmove_status - type: vmove_type - message: vmove_message **Example List vmoves** .. literalinclude:: ../../doc/api_samples/vmoves/vmoves-list-resp.json :language: javascript Show VMove Details =================== .. rest_method:: GET /notifications/{notification_id}/vmoves/{vmove_id} Shows details for a vmove. **Preconditions** The notification must exist. The vmove must exist. Response Codes -------------- .. rest_status_code:: success status.yaml - 200 .. rest_status_code:: error status.yaml - 401 - 403 - 404 Request ------- .. rest_parameters:: parameters.yaml - notification_id: notification_id_path - vmove_id: vmove_id_path Response -------- .. 
rest_parameters:: parameters.yaml - vmove: vmove - created_at: created - updated_at: updated - deleted: deleted - id: vmove_id - uuid: vmove_uuid - notification_uuid: notification_uuid - instance_uuid: vmove_instance_uuid - instance_name: vmove_instance_name - source_host: host_name - dest_host: host_name - start_time: vmove_start_time - end_time: vmove_end_time - status: vmove_status - type: vmove_type - message: vmove_message **Example Show VMove Details** .. literalinclude:: ../../doc/api_samples/vmoves/vmove-get-resp.json :language: javascript ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/bindep.txt0000664000175100017510000000137615033036143015437 0ustar00mylesmyles# This is a cross-platform list tracking distribution packages needed for install and tests; # see https://docs.openstack.org/infra/bindep/ for additional information. # libpq-dev is needed by openstack-tox-py37 build only. libpq-dev [platform:dpkg test] python3-dev [platform:dpkg test] libmysqlclient-dev [platform:dpkg test !platform:debian] libmariadb-dev-compat [platform:debian] mysql [platform:rpm test] mysql-client [platform:dpkg test !platform:debian] mysql-devel [platform:rpm test] mysql-server [test !platform:debian] mariadb-server [platform:debian] postgresql [test] postgresql-client [platform:dpkg test] postgresql-devel [platform:rpm test] postgresql-server [platform:rpm test] libsqlite3-dev [platform:dpkg test] sqlite-devel [platform:rpm test] ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1751923813.5217102 masakari-19.1.0.dev18/devstack/0000775000175100017510000000000015033036146015235 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/devstack/README.rst0000664000175100017510000000156615033036143016731 0ustar00mylesmyles============================= Enabling Masakari in DevStack 
============================= To enable Masakari in DevStack, perform the following steps: Download DevStack ================= .. sourcecode:: bash export DEVSTACK_DIR=~/devstack git clone https://opendev.org/openstack/devstack.git $DEVSTACK_DIR Enable the Masakari plugin ========================== Enable the plugin by adding the following section to ``$DEVSTACK_DIR/local.conf`` .. sourcecode:: bash [[local|localrc]] enable_plugin masakari https://opendev.org/openstack/masakari Optionally, a git refspec (branch or tag or commit) may be provided as follows: .. sourcecode:: bash [[local|localrc]] enable_plugin masakari https://opendev.org/openstack/masakari Run the DevStack utility ======================== .. sourcecode:: bash cd $DEVSTACK_DIR ./stack.sh ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/devstack/plugin.sh0000664000175100017510000003167115033036143017074 0ustar00mylesmyles#!/bin/bash # # lib/masakari # Functions to control the configuration and operation of the **Masakari** service # Dependencies: # ``functions`` file # ``DEST``, ``STACK_USER`` must be defined # ``SERVICE_{HOST|PROTOCOL|TOKEN}`` must be defined # ``stack.sh`` calls the entry points in this order: # # masakari-api # install - install_masakari # post-config - configure_masakari # extra - init_masakari start_masakari # unstack - stop_masakari cleanup_masakari # # masakari-engine # install - install_masakari # post-config - configure_masakari # extra - init_masakari start_masakari # unstack - stop_masakari cleanup_masakari # # masakari-monitors # post-config - configure_masakarimonitors # extra - run_masakarimonitors # unstack - stop_masakari_monitors cleanup_masakari_monitors # Save trace setting XTRACE=$(set +o | grep xtrace) set -o xtrace if is_service_enabled tls-proxy; then MASAKARI_SERVICE_PROTOCOL="https" fi # Toggle for deploying Masakari under a wsgi server. 
MASAKARI_USE_MOD_WSGI=${MASAKARI_USE_MOD_WSGI:-True} # Functions # --------- # setup_masakari_logging() - Adds logging configuration to conf files function setup_masakari_logging { local CONF=$1 iniset $CONF DEFAULT debug $ENABLE_DEBUG_LOG_LEVEL iniset $CONF DEFAULT use_syslog $SYSLOG if [ "$LOG_COLOR" == "True" ] && [ "$SYSLOG" == "False" ]; then # Add color to logging output setup_colorized_logging $CONF DEFAULT tenant user fi } # create_masakari_accounts() - Set up common required masakari accounts # Tenant User Roles # ------------------------------------------------------------------ # service masakari admin # if enabled function create_masakari_accounts { if [[ "$ENABLED_SERVICES" =~ "masakari" ]]; then create_service_user "$USERNAME" "admin" local masakari_service masakari_service=$(get_or_create_service "masakari" \ "instance-ha" "OpenStack High Availability") if [ "$MASAKARI_USE_MOD_WSGI" == "False" ]; then get_or_create_endpoint $masakari_service \ "$REGION_NAME" \ "$MASAKARI_SERVICE_PROTOCOL://$SERVICE_HOST:$MASAKARI_SERVICE_PORT/v1/\$(tenant_id)s" else get_or_create_endpoint $masakari_service \ "$REGION_NAME" \ "$MASAKARI_SERVICE_PROTOCOL://$SERVICE_HOST/instance-ha/v1/\$(tenant_id)s" fi fi } # stack.sh entry points # --------------------- # cleanup_masakari() - Remove residual data files, anything left over from previous # runs that a clean run would need to clean up function cleanup_masakari { # Clean up dirs rm -fr $MASAKARI_CONF_DIR/* if [ "$MASAKARI_USE_MOD_WSGI" == "True" ]; then remove_uwsgi_config "$MASAKARI_UWSGI_CONF" "$MASAKARI_UWSGI" fi } # cleanup_masakari_monitors() - Remove residual data files, anything left over from previous # runs that a clean run would need to clean up function cleanup_masakari_monitors { # Clean up dirs rm -fr $MASAKARI_MONITORS_CONF_DIR/* } # iniset_conditional() - Sets the value in the inifile, but only if it's # actually got a value function iniset_conditional { local FILE=$1 local SECTION=$2 local OPTION=$3 local 
VALUE=$4 if [[ -n "$VALUE" ]]; then iniset ${FILE} ${SECTION} ${OPTION} ${VALUE} fi } # configure_masakari() - Set config files, create data dirs, etc function configure_masakari { setup_develop $MASAKARI_DIR # Create the masakari conf dir if it doesn't exist sudo install -d -o $STACK_USER ${MASAKARI_CONF_DIR} # Copy api-paste file over to the masakari conf dir cp $MASAKARI_LOCAL_API_PASTE_INI $MASAKARI_API_PASTE_INI # (Re)create masakari conf files rm -f $MASAKARI_CONF # (Re)create masakari api conf file if needed if is_service_enabled masakari-api; then oslo-config-generator --namespace keystonemiddleware.auth_token \ --namespace masakari \ --namespace oslo.db \ > $MASAKARI_CONF # Set common configuration values (but only if they're defined) iniset $MASAKARI_CONF DEFAULT masakari_api_workers "$API_WORKERS" iniset $MASAKARI_CONF database connection `database_connection_url masakari` # Set taskflow connection to store the recovery workflow details in db iniset $MASAKARI_CONF taskflow connection `database_connection_url masakari` setup_masakari_logging $MASAKARI_CONF configure_keystone_authtoken_middleware $MASAKARI_CONF masakari fi # Set os_privileged_user credentials (used for connecting nova service) iniset $MASAKARI_CONF DEFAULT os_privileged_user_name nova iniset $MASAKARI_CONF DEFAULT os_privileged_user_auth_url "$KEYSTONE_SERVICE_URI" iniset $MASAKARI_CONF DEFAULT os_privileged_user_password "$SERVICE_PASSWORD" iniset $MASAKARI_CONF DEFAULT os_privileged_user_tenant "$SERVICE_PROJECT_NAME" iniset $MASAKARI_CONF DEFAULT graceful_shutdown_timeout "$SERVICE_GRACEFUL_SHUTDOWN_TIMEOUT" iniset_rpc_backend masakari $MASAKARI_CONF DEFAULT if is_service_enabled tls-proxy; then iniset $MASAKARI_CONF DEFAULT masakari_api_listen_port $MASAKARI_SERVICE_PORT_INT fi if [ "$MASAKARI_USE_MOD_WSGI" == "True" ]; then write_uwsgi_config "$MASAKARI_UWSGI_CONF" "$MASAKARI_UWSGI" "/instance-ha" "" "masakari-api" fi } # configure_masakarimonitors() - Set config files, create data 
dirs, etc function configure_masakarimonitors { git_clone $MASAKARI_MONITORS_REPO $MASAKARI_MONITORS_DIR $MASAKARI_MONITORS_BRANCH # Create masakarimonitors conf dir if it doesn't exist sudo install -d -o $STACK_USER ${MASAKARI_MONITORS_CONF_DIR} setup_develop $MASAKARI_MONITORS_DIR # (Re)create masakarimonitors conf files rm -f $MASAKARI_MONITORS_CONF # (Re)create masakarimonitors api conf file if needed oslo-config-generator --namespace masakarimonitors.conf \ --namespace oslo.log \ --namespace oslo.middleware \ > $MASAKARI_MONITORS_CONF iniset $MASAKARI_MONITORS_CONF api auth_url "$KEYSTONE_SERVICE_URI" iniset $MASAKARI_MONITORS_CONF api password "$SERVICE_PASSWORD" iniset $MASAKARI_MONITORS_CONF api project_name "$SERVICE_PROJECT_NAME" iniset $MASAKARI_MONITORS_CONF api username "$USERNAME" iniset $MASAKARI_MONITORS_CONF api user_domain_id "$SERVICE_DOMAIN_ID" iniset $MASAKARI_MONITORS_CONF api project_domain_id "$SERVICE_DOMAIN_ID" iniset $MASAKARI_MONITORS_CONF api region "$REGION_NAME" iniset $MASAKARI_MONITORS_CONF process process_list_path "$MASAKARI_MONITORS_CONF_DIR/process_list.yaml" touch $MASAKARI_MONITORS_CONF_DIR/process_list.yaml } # install_masakari() - Collect source and prepare function install_masakari { setup_develop $MASAKARI_DIR } # init_masakari() - Initializes Masakari Database as a Service function init_masakari { # (Re)Create masakari db recreate_database masakari # Initialize the masakari database $MASAKARI_MANAGE db sync # Add an admin user to the 'tempest' alt_demo tenant. # This is needed to test the guest_log functionality. # The first part mimics the tempest setup, so make sure we have that. ALT_USERNAME=${ALT_USERNAME:-alt_demo} ALT_TENANT_NAME=${ALT_TENANT_NAME:-alt_demo} get_or_create_project ${ALT_TENANT_NAME} default get_or_create_user ${ALT_USERNAME} "$ADMIN_PASSWORD" "default" "alt_demo@example.com" get_or_add_user_project_role member ${ALT_USERNAME} ${ALT_TENANT_NAME} # The second part adds an admin user to the tenant. 
ADMIN_ALT_USERNAME=${ADMIN_ALT_USERNAME:-admin_${ALT_USERNAME}} get_or_create_user ${ADMIN_ALT_USERNAME} "$ADMIN_PASSWORD" "default" "admin_alt_demo@example.com" get_or_add_user_project_role admin ${ADMIN_ALT_USERNAME} ${ALT_TENANT_NAME} } # start_masakari() - Start running processes function start_masakari { local masakari_url if [[ "$ENABLED_SERVICES" =~ "masakari-api" ]]; then if [ "$MASAKARI_USE_MOD_WSGI" == "False" ]; then run_process masakari-api "$MASAKARI_BIN_DIR/masakari-api --config-file=$MASAKARI_CONF --debug" masakari_url=$MASAKARI_SERVICE_PROTOCOL://$MASAKARI_SERVICE_HOST:$MASAKARI_SERVICE_PORT # Start proxy if tls enabled if is_service_enabled tls_proxy; then start_tls_proxy masakari-service '*' $MASAKARI_SERVICE_PORT $SERVICE_HOST $MASAKARI_SERVICE_PORT_INT fi else run_process "masakari-api" "$(which uwsgi) --procname-prefix masakari-api --ini $MASAKARI_UWSGI_CONF" masakari_url=$MASAKARI_SERVICE_PROTOCOL://$MASAKARI_SERVICE_HOST/instance-ha/v1 fi echo "Waiting for Masakari API to start..." if ! 
wait_for_service $SERVICE_TIMEOUT $masakari_url; then die $LINENO "masakari-api did not start" fi fi if [[ "$ENABLED_SERVICES" =~ "masakari-engine" ]]; then run_process masakari-engine "$MASAKARI_BIN_DIR/masakari-engine --config-file=$MASAKARI_CONF --debug" fi } #install masakari-dashboard function install_masakaridashboard { git_clone $MASAKARI_DASHBOARD_REPO $MASAKARI_DASHBOARD_DIR $MASAKARI_DASHBOARD_BRANCH setup_develop $MASAKARI_DASHBOARD_DIR ln -fs $MASAKARI_DASHBOARD_DIR/masakaridashboard/local/enabled/_50_masakaridashboard.py \ $HORIZON_DIR/openstack_dashboard/local/enabled ln -fs $MASAKARI_DASHBOARD_DIR/masakaridashboard/local/local_settings.d/_50_masakari.py \ $HORIZON_DIR/openstack_dashboard/local/local_settings.d ln -fs $MASAKARI_DASHBOARD_DIR/masakaridashboard/conf/masakari_policy.yaml \ $HORIZON_DIR/openstack_dashboard/conf } #uninstall masakari-dashboard function uninstall_masakaridashboard { sudo rm -f $DEST/horizon/openstack_dashboard/local/enabled/_50_masakaridashboard.py sudo rm -f $DEST/horizon/openstack_dashboard/local/local_settings.d/_50_masakari.py sudo rm -f $DEST/horizon/openstack_dashboard/conf/masakari_policy.yaml restart_apache_server } # stop_masakari() - Stop running processes function stop_masakari { # Kill the masakari services local serv for serv in masakari-engine masakari-api; do stop_process $serv done } #run masakari-monitors function run_masakarimonitors { run_process masakari-processmonitor "$MASAKARI_BIN_DIR/masakari-processmonitor" run_process masakari-instancemonitor "$MASAKARI_BIN_DIR/masakari-instancemonitor" run_process masakari-introspectiveinstancemonitor "$MASAKARI_BIN_DIR/masakari-introspectiveinstancemonitor" } # stop_masakari_monitors() - Stop running processes function stop_masakari_monitors { # Kill the masakari-monitors services local serv for serv in masakari-processmonitor masakari-instancemonitor masakari-introspectiveinstancemonitor; do stop_process $serv done } # Dispatcher for masakari plugin if 
is_service_enabled masakari; then if [[ "$1" == "stack" && "$2" == "install" ]]; then echo_summary "Installing Masakari" if [[ "$ENABLED_SERVICES" =~ "masakari-api" ]]; then install_masakari fi elif [[ "$1" == "stack" && "$2" == "post-config" ]]; then echo_summary "Configuring Masakari" if [[ "$ENABLED_SERVICES" =~ "masakari-api" ]]; then configure_masakari if is_service_enabled key; then create_masakari_accounts fi fi elif [[ "$1" == "stack" && "$2" == "extra" ]]; then # Initialize and Start the masakari API and masakari taskmgr components if [[ "$ENABLED_SERVICES" =~ "masakari-api" ]]; then init_masakari echo_summary "Starting Masakari" start_masakari fi if is_service_enabled horizon; then # install masakari-dashboard echo_summary "Installing masakari-dashboard" install_masakaridashboard fi fi if [[ "$1" == "unstack" ]]; then if is_service_enabled horizon; then echo_summary "Uninstall masakari-dashboard" uninstall_masakaridashboard fi if [[ "$ENABLED_SERVICES" =~ "masakari-api" ]]; then stop_masakari cleanup_masakari fi fi fi if is_service_enabled masakari-monitors; then if [[ "$1" == "stack" && "$2" == "post-config" ]]; then if is_service_enabled n-cpu; then # Configure masakari-monitors echo_summary "Configure masakari-monitors" configure_masakarimonitors fi elif [[ "$1" == "stack" && "$2" == "extra" ]]; then if is_service_enabled n-cpu; then # Run masakari-monitors echo_summary "Running masakari-monitors" run_masakarimonitors fi fi if [[ "$1" == "unstack" ]]; then if is_service_enabled n-cpu; then echo_summary "Uninstall masakari-monitors" stop_masakari_monitors cleanup_masakari_monitors fi fi fi # Restore xtrace $XTRACE # Tell emacs to use shell-script-mode ## Local variables: ## mode: shell-script ## End: ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/devstack/settings0000664000175100017510000000470215033036143017020 0ustar00mylesmyles# Settings needed for Masakari plugin # 
----------------------------------- # Set up default directories MASAKARI_DIR=${MASAKARI_DIR:-${DEST}/masakari} MASAKARI_REPO=${GIT_BASE}/openstack/masakari.git} MASAKARI_BRANCH=${MASAKARI_BRANCH:-master} MASAKARI_DASHBOARD_REPO=${MASAKARI_DASHBOARD_REPO:-${GIT_BASE}/openstack/masakari-dashboard.git} MASAKARI_DASHBOARD_BRANCH=${MASAKARI_DASHBOARD_BRANCH:-master} MASAKARI_MONITORS_DIR=${MASAKARI_MONITORS_DIR:-${DEST}/masakari-monitors} MASAKARI_MONITORS_REPO=${MASAKARI_MONITORS_REPO:-${GIT_BASE}/openstack/masakari-monitors.git} MASAKARI_MONITORS_BRANCH=${MASAKARI_MONITORS_BRANCH:-master} # Set up configuration directory and files MASAKARI_CONF_DIR=${MASAKARI_CONF_DIR:-/etc/masakari} MASAKARI_DASHBOARD_DIR=$DEST/masakari-dashboard MASAKARI_CONF=${MASAKARI_CONF:-${MASAKARI_CONF_DIR}/masakari.conf} MASAKARI_API_PASTE_INI=${MASAKARI_API_PASTE_INI:-${MASAKARI_CONF_DIR}/api-paste.ini} MASAKARI_POLICY_JSON=${MASAKARI_POLICY_JSON:-${MASAKARI_CONF_DIR}/policy.yaml} MASAKARI_MONITORS_CONF_DIR=${MASAKARI_MONITORS_CONF_DIR:-/etc/masakarimonitors} MASAKARI_MONITORS_CONF=${MASAKARI_MONITORS_CONF:-${MASAKARI_MONITORS_CONF_DIR}/masakarimonitors.conf} MASAKARI_LOCAL_CONF_DIR=${MASAKARI_LOCAL_CONF_DIR:-${MASAKARI_DIR}/etc/masakari} MASAKARI_LOCAL_API_PASTE_INI=${MASAKARI_LOCAL_API_PASTE_INI:-${MASAKARI_LOCAL_CONF_DIR}/api-paste.ini} MASAKARI_LOCAL_POLICY_JSON=${MASAKARI_LOCAL_POLICY_JSON:-${MASAKARI_LOCAL_CONF_DIR}/policy.yaml} MASAKARI_AUTH_CACHE_DIR=${MASAKARI_AUTH_CACHE_DIR:-/var/cache/masakari} MASAKARI_SERVICE_HOST=${MASAKARI_SERVICE_HOST:-$SERVICE_HOST} MASAKARI_SERVICE_PROTOCOL=${MASAKARI_SERVICE_PROTOCOL:-http} # set environment variables USERNAME="masakari" SERVICE_DOMAIN_ID="default" # Support entry points installation of console scripts if [[ -d $MASAKARI_DIR/bin ]]; then MASAKARI_BIN_DIR=$MASAKARI_DIR/bin MASAKARI_MONITORS_BIN_DIR=$MASAKARI_MONITORS_DIR/bin else MASAKARI_BIN_DIR=$(get_python_exec_prefix) fi MASAKARI_MANAGE=$MASAKARI_BIN_DIR/masakari-manage # Public facing 
bits MASAKARI_SERVICE_PORT=${MASAKARI_SERVICE_PORT:-15868} MASAKARI_SERVICE_PORT_INT=${MASAKARI_SERVICE_PORT_INT:-25868} MASAKARI_UWSGI=masakari.wsgi.api:application MASAKARI_UWSGI_CONF=$MASAKARI_CONF_DIR/masakari-api-uwsgi.ini enable_service masakari masakari-api masakari-engine enable_service masakari-monitors masakari-processmonitor masakari-instancemonitor masakari-hostmonitor enable_service masakari-introspectiveinstancemonitor ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1751923813.5217102 masakari-19.1.0.dev18/doc/0000775000175100017510000000000015033036146014176 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1751923813.5187101 masakari-19.1.0.dev18/doc/api_samples/0000775000175100017510000000000015033036146016473 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000003200000000000010210 xustar0026 mtime=1751923813.52271 masakari-19.1.0.dev18/doc/api_samples/hosts/0000775000175100017510000000000015033036146017633 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/api_samples/hosts/host-create-req.json0000664000175100017510000000017715033036143023533 0ustar00mylesmyles{ "host": { "control_attributes": "SSH", "type": "COMPUTE", "name": "openstack-VirtualBox" } } ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/api_samples/hosts/host-create-resp.json0000664000175100017510000000156315033036143023715 0ustar00mylesmyles{ "host": { "reserved": false, "uuid": "083a8474-22c0-407f-b89b-c569134c3bfd", "deleted": false, "on_maintenance": false, "created_at": "2017-04-21T10:09:20.000000", "control_attributes": "SSH", "updated_at": null, "name": "openstack-VirtualBox", "failover_segment": { "uuid": "9e800031-6946-4b43-bf09-8b3d1cab792b", "deleted": false, "created_at": "2017-04-20T10:17:17.000000", "description": null, 
"recovery_method": "auto", "updated_at": null, "service_type": "COMPUTE", "deleted_at": null, "id": 2, "name": "segment2" }, "deleted_at": null, "type": "COMPUTE_HOST", "id": 1, "failover_segment_id": "9e800031-6946-4b43-bf09-8b3d1cab792b" } }././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/api_samples/hosts/host-get-resp.json0000664000175100017510000000156315033036143023231 0ustar00mylesmyles{ "host": { "reserved": false, "uuid": "083a8474-22c0-407f-b89b-c569134c3bfd", "deleted": false, "on_maintenance": false, "created_at": "2017-04-21T10:09:20.000000", "control_attributes": "SSH", "updated_at": null, "name": "openstack-VirtualBox", "failover_segment": { "uuid": "9e800031-6946-4b43-bf09-8b3d1cab792b", "deleted": false, "created_at": "2017-04-20T10:17:17.000000", "description": null, "recovery_method": "auto", "updated_at": null, "service_type": "COMPUTE", "deleted_at": null, "id": 2, "name": "segment2" }, "deleted_at": null, "type": "COMPUTE_HOST", "id": 1, "failover_segment_id": "9e800031-6946-4b43-bf09-8b3d1cab792b" } }././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/api_samples/hosts/host-update-req.json0000664000175100017510000000006315033036143023544 0ustar00mylesmyles{ "host": { "reserved": "True" } } ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/api_samples/hosts/host-update-resp.json0000664000175100017510000000161115033036143023726 0ustar00mylesmyles{ "host": { "reserved": true, "uuid": "083a8474-22c0-407f-b89b-c569134c3bfd", "deleted": false, "on_maintenance": false, "created_at": "2017-04-21T10:09:20.000000", "control_attributes": "SSH", "updated_at": "2017-04-21T11:12:43.351320", "name": "openstack-VirtualBox", "failover_segment": { "uuid": "9e800031-6946-4b43-bf09-8b3d1cab792b", "deleted": false, "created_at": "2017-04-20T10:17:17.000000", 
"description": null, "recovery_method": "auto", "updated_at": null, "service_type": "Compute", "deleted_at": null, "id": 2, "name": "new_segment" }, "deleted_at": null, "type": "COMPUTE", "id": 1, "failover_segment_id": "9e800031-6946-4b43-bf09-8b3d1cab792b" } } ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/api_samples/hosts/hosts-list-resp.json0000664000175100017510000000175015033036143023606 0ustar00mylesmyles{ "hosts": [ { "reserved": false, "uuid": "083a8474-22c0-407f-b89b-c569134c3bfd", "deleted": false, "on_maintenance": false, "created_at": "2017-04-21T10:09:20.000000", "control_attributes": "SSH", "updated_at": null, "name": "openstack-VirtualBox", "failover_segment": { "uuid": "9e800031-6946-4b43-bf09-8b3d1cab792b", "deleted": false, "created_at": "2017-04-20T10:17:17.000000", "description": null, "recovery_method": "auto", "updated_at": null, "service_type": "COMPUTE", "deleted_at": null, "id": 2, "name": "segment2" }, "deleted_at": null, "type": "COMPUTE_HOST", "id": 1, "failover_segment_id": "9e800031-6946-4b43-bf09-8b3d1cab792b" } ] }././@PaxHeader0000000000000000000000000000003200000000000010210 xustar0026 mtime=1751923813.52271 masakari-19.1.0.dev18/doc/api_samples/notifications/0000775000175100017510000000000015033036146021344 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/api_samples/notifications/host-notification-create-req.json0000664000175100017510000000045215033036143027724 0ustar00mylesmyles{ "notification": { "type": "COMPUTE_HOST", "generated_time": "2017-04-24 08:34:46", "payload": { "event": "STOPPED", "host_status": "UNKNOWN", "cluster_status": "OFFLINE" }, "hostname": "openstack-VirtualBox" } } ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 
masakari-19.1.0.dev18/doc/api_samples/notifications/host-notification-create-resp.json0000664000175100017510000000107615033036143030111 0ustar00mylesmyles{ "notification": { "notification_uuid": "9e66b95d-45da-4695-bfb6-ace68b35d955", "status": "new", "source_host_uuid": "083a8474-22c0-407f-b89b-c569134c3bfd", "deleted": false, "created_at": "2017-04-24T06:37:37.396994", "updated_at": null, "id": 4, "generated_time": "2017-04-24T08:34:46.000000", "deleted_at": null, "type": "COMPUTE_HOST", "payload": { "host_status": "UNKNOWN", "event": "STOPPED", "cluster_status": "OFFLINE" } } } ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/api_samples/notifications/notifcations-list-resp.json0000664000175100017510000000114315033036143026653 0ustar00mylesmyles{ "notifications": [ { "notification_uuid": "32bc95ac-858d-460a-b562-7e365391be64", "status": "new", "source_host_uuid": "083a8474-22c0-407f-b89b-c569134c3bfd", "deleted": false, "created_at": "2017-04-21T12:09:44.000000", "updated_at": null, "id": 1, "generated_time": "2017-04-21T17:29:55.000000", "deleted_at": null, "type": "PROCESS", "payload": { "process_name": "nova-compute", "event": "stopped" } } ] } ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/api_samples/notifications/notification-get-resp.json0000664000175100017510000000432115033036143026446 0ustar00mylesmyles{ "notification": { "notification_uuid": "07a331b8-df15-4582-b121-73ed3541a408", "status": "finished", "source_host_uuid": "b5bc49be-ea6f-472d-9240-968f75d7a16a", "deleted": false, "created_at": "2019-02-28T07:19:49.000000", "updated_at": "2019-02-28T07:19:59.000000", "payload": { "instance_uuid": "b9837317-a5b8-44f4-93b4-45500c562bb8", "vir_domain_event": "STOPPED_FAILED", "event": "LIFECYCLE" }, "recovery_workflow_details": [ { "progress": 1.0, "state": "SUCCESS", "name": "StopInstanceTask", "progress_details": 
[ {"timestamp": "2019-03-07 13:54:28.842031", "message": "Stopping instance: df528f02-2415-4a40-bad8-453ad6a519f7", "progress": "0.0"}, {"timestamp": "2019-03-07 13:54:34.442617", "message": "Stopped instance: 'df528f02-2415-4a40-bad8-453ad6a519f7'", "progress": "1.0"} ] }, { "progress": 1.0, "state": "SUCCESS", "name": "StartInstanceTask", "progress_details": [ {"timestamp": "2019-03-07 13:54:34.531755", "message": "Starting instance: 'df528f02-2415-4a40-bad8-453ad6a519f7'", "progress": "0.0"}, {"timestamp": "2019-03-07 13:54:35.930430", "message": "Instance started: 'df528f02-2415-4a40-bad8-453ad6a519f7'", "progress": "1.0"} ] }, { "progress": 1.0, "state": "SUCCESS", "name": "ConfirmInstanceActiveTask", "progress_details": [ {"timestamp": "2019-03-07 13:54:36.019208", "message": "Confirming instance 'df528f02-2415-4a40-bad8-453ad6a519f7' vm_state is ACTIVE", "progress": "0.0"}, {"timestamp": "2019-03-07 13:54:38.569373", "message": "Confirmed instance 'df528f02-2415-4a40-bad8-453ad6a519f7' vm_state is ACTIVE", "progress": "1.0"} ] } ], "generated_time": "2017-06-13T15:34:55.000000", "deleted_at": null, "type": "VM", "id": 13 } } ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/api_samples/notifications/process-notification-create-req.json0000664000175100017510000000040215033036143030420 0ustar00mylesmyles{ "notification": { "type": "PROCESS", "generated_time": "2017-04-21 17:29:55", "payload": { "process_name": "nova-compute", "event": "stopped" }, "hostname": "openstack-VirtualBox" } } ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/api_samples/notifications/process-notification-create-resp.json0000664000175100017510000000102615033036143030605 0ustar00mylesmyles{ "notification": { "notification_uuid": "2b412acf-c55a-442d-8fd2-e823ec0d827f", "status": "new", "source_host_uuid": "083a8474-22c0-407f-b89b-c569134c3bfd", 
"deleted": false, "created_at": "2017-04-24T06:05:29.387678", "updated_at": null, "id": 2, "generated_time": "2017-04-21T17:29:55.000000", "deleted_at": null, "type": "PROCESS", "payload": { "process_name": "nova-compute", "event": "stopped" } } } ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/api_samples/notifications/vm-notification-create-req.json0000664000175100017510000000052415033036143027371 0ustar00mylesmyles{ "notification": { "type": "VM", "generated_time": "2017-04-23T07:18:51.523726", "payload": { "instance_uuid": "96ab1c42-668c-4f2d-8689-afa3301d4ee9", "vir_domain_event": "STOPPED_DESTROYED", "event": "LIFECYCLE" }, "hostname": "openstack-VirtualBox" } } ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/api_samples/notifications/vm-notification-create-resp.json0000664000175100017510000000114115033036143027547 0ustar00mylesmyles{ "notification": { "notification_uuid": "f4836386-7648-4395-89b6-75a2c5ca7ff2", "status": "new", "source_host_uuid": "083a8474-22c0-407f-b89b-c569134c3bfd", "deleted": false, "created_at": "2017-04-24T06:22:47.569979", "updated_at": null, "id": 3, "generated_time": "2017-04-23T07:18:51.523726", "deleted_at": null, "type": "VM", "payload": { "instance_uuid": "96ab1c42-668c-4f2d-8689-afa3301d4ee9", "vir_domain_event": "STOPPED_DESTROYED", "event": "LIFECYCLE" } } } ././@PaxHeader0000000000000000000000000000003200000000000010210 xustar0026 mtime=1751923813.52271 masakari-19.1.0.dev18/doc/api_samples/segments/0000775000175100017510000000000015033036146020320 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/api_samples/segments/segment-create-req.json0000664000175100017510000000023015033036143024673 0ustar00mylesmyles{ "segment": { "service_type": "COMPUTE", "recovery_method": "auto", "name": 
"new_segment", "enabled": true } } ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/api_samples/segments/segment-create-resp.json0000664000175100017510000000060415033036143025062 0ustar00mylesmyles{ "segment": { "uuid": "5fd9f925-0379-40db-a7f8-786a0b655b2a", "deleted": false, "created_at": "2017-04-21T08:59:53.991030", "description": null, "recovery_method": "auto", "updated_at": null, "service_type": "COMPUTE", "deleted_at": null, "id": 4, "name": "new_segment", "enabled": true } } ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/api_samples/segments/segment-get-resp.json0000664000175100017510000000060415033036143024376 0ustar00mylesmyles{ "segment": { "uuid": "5fd9f925-0379-40db-a7f8-786a0b655b2a", "deleted": false, "created_at": "2017-04-21T08:59:53.991030", "description": null, "recovery_method": "auto", "updated_at": null, "service_type": "COMPUTE", "deleted_at": null, "id": 4, "name": "new_segment", "enabled": true } } ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/api_samples/segments/segment-update-req.json0000664000175100017510000000012315033036143024713 0ustar00mylesmyles{ "segment": { "name": "new_segment", "enabled": false } } ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/api_samples/segments/segment-update-resp.json0000664000175100017510000000063515033036143025105 0ustar00mylesmyles{ "segment": { "uuid": "5fd9f925-0379-40db-a7f8-786a0b655b2a", "deleted": false, "created_at": "2017-04-21T08:59:54.000000", "description": null, "recovery_method": "auto", "updated_at": "2017-04-21T09:47:03.748028", "service_type": "COMPUTE", "deleted_at": null, "id": 4, "name": "new_segment", "enabled": false } } ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 
mtime=1751923811.0 masakari-19.1.0.dev18/doc/api_samples/segments/segments-list-resp.json0000664000175100017510000000071015033036143024753 0ustar00mylesmyles{ "segments": [ { "uuid": "9e800031-6946-4b43-bf09-8b3d1cab792b", "deleted": false, "created_at": "2017-04-20T10:17:17.000000", "description": "Segment1", "recovery_method": "auto", "updated_at": null, "service_type": "Compute", "deleted_at": null, "id": 1, "name": "segment2", "enabled": true } ] } ././@PaxHeader0000000000000000000000000000003200000000000010210 xustar0026 mtime=1751923813.52271 masakari-19.1.0.dev18/doc/api_samples/versions/0000775000175100017510000000000015033036146020343 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/api_samples/versions/v1-version-get-resp.json0000664000175100017510000000122615033036143024771 0ustar00mylesmyles{ "version": { "id": "v1", "links": [ { "href": "http://openstack.example.com/v1/", "rel": "self" }, { "href": "http://docs.openstack.org/", "rel": "describedby", "type": "text/html" } ], "media-types": [ { "base": "application/json", "type": "application/vnd.openstack.masakari+json;version=1" } ], "status": "CURRENT", "version": "1.0", "min_version": "1.0", "updated": "2016-07-01T11:33:21Z" } }././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/api_samples/versions/versions-get-resp.json0000664000175100017510000000060415033036143024627 0ustar00mylesmyles{ "versions": [ { "id": "v1.0", "links": [ { "href": "http://openstack.example.com/v1/", "rel": "self" } ], "status": "CURRENT", "version": "1.0", "min_version": "1.0", "updated": "2016-07-01T11:33:21Z" } ] }././@PaxHeader0000000000000000000000000000003200000000000010210 xustar0026 mtime=1751923813.52271 masakari-19.1.0.dev18/doc/api_samples/vmoves/0000775000175100017510000000000015033036146020012 
5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/api_samples/vmoves/vmove-get-resp.json0000664000175100017510000000111615033036143023561 0ustar00mylesmyles{ "vmove": { "created_at": "2023-01-28T14:55:27.000000", "updated_at": null, "deleted_at": null, "deleted": false, "id": 1, "notification_uuid": "a0e70d3a-b3a2-4616-b65d-a7c03a2c85fc", "instance_uuid": "1c2f1795-ce78-4d4c-afd0-ce141fdb3952", "instance_name": "vm1", "source_host": "host1", "dest_host": "host2", "start_time": "2023-01-28T14:55:27.000000", "end_time": "2023-01-28T14:55:31.000000", "status": "succeeded", "type": "evacuation", "message": null } }././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/api_samples/vmoves/vmoves-list-resp.json0000664000175100017510000000123715033036143024144 0ustar00mylesmyles{ "vmoves": [ { "created_at": "2023-01-28T14:55:27.000000", "updated_at": null, "deleted_at": null, "deleted": false, "id": 1, "notification_uuid": "a0e70d3a-b3a2-4616-b65d-a7c03a2c85fc", "instance_uuid": "1c2f1795-ce78-4d4c-afd0-ce141fdb3952", "instance_name": "vm1", "source_host": "host1", "dest_host": "host2", "start_time": "2023-01-28T14:55:27.000000", "end_time": "2023-01-28T14:55:31.000000", "status": "succeeded", "type": "evacuation", "message": null } ] }././@PaxHeader0000000000000000000000000000003200000000000010210 xustar0026 mtime=1751923813.52271 masakari-19.1.0.dev18/doc/ext/0000775000175100017510000000000015033036146014776 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/ext/__init__.py0000664000175100017510000000000015033036143017072 0ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/ext/versioned_notifications.py0000664000175100017510000001246615033036143022305 
0ustar00mylesmyles# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ This provides a sphinx extension able to list the implemented versioned notifications into the developer documentation. It is used via a single directive in the .rst file .. versioned_notifications:: """ import os from docutils import nodes from docutils.parsers import rst import importlib from oslo_serialization import jsonutils import pkgutil from masakari.notifications.objects import base as notification from masakari.objects import base from masakari.tests import json_ref import masakari.utils class VersionedNotificationDirective(rst.Directive): SAMPLE_ROOT = 'doc/notification_samples/' TOGGLE_SCRIPT = """ """ def run(self): notifications = self._collect_notifications() return self._build_markup(notifications) def _import_all_notification_packages(self): list(map(lambda module: importlib.import_module(module), ('masakari.notifications.objects.' 
+ name for _, name, _ in pkgutil.iter_modules(masakari.notifications.objects.__path__)))) def _collect_notifications(self): self._import_all_notification_packages() base.MasakariObjectRegistry.register_notification_objects() notifications = {} ovos = base.MasakariObjectRegistry.obj_classes() for name, cls in ovos.items(): cls = cls[0] if (issubclass(cls, notification.NotificationBase) and cls != notification.NotificationBase): payload_name = cls.fields['payload'].objname payload_cls = ovos[payload_name][0] for sample in cls.samples: if sample in notifications: raise ValueError('Duplicated usage of %s ' 'sample file detected' % sample) notifications[sample] = ((cls.__name__, payload_cls.__name__, sample)) return sorted(notifications.values()) def _build_markup(self, notifications): content = [] cols = ['Event type', 'Notification class', 'Payload class', 'Sample'] table = nodes.table() content.append(table) group = nodes.tgroup(cols=len(cols)) table.append(group) head = nodes.thead() group.append(head) for _ in cols: group.append(nodes.colspec(colwidth=1)) body = nodes.tbody() group.append(body) # fill the table header row = nodes.row() body.append(row) for col_name in cols: col = nodes.entry() row.append(col) text = nodes.strong(text=col_name) col.append(text) # fill the table content, one notification per row for name, payload, sample_file in notifications: event_type = sample_file[0: -5].replace('-', '.') row = nodes.row() body.append(row) col = nodes.entry() row.append(col) text = nodes.literal(text=event_type) col.append(text) col = nodes.entry() row.append(col) text = nodes.literal(text=name) col.append(text) col = nodes.entry() row.append(col) text = nodes.literal(text=payload) col.append(text) col = nodes.entry() row.append(col) with open(os.path.join(self.SAMPLE_ROOT, sample_file), 'r') as f: sample_content = f.read() sample_obj = jsonutils.loads(sample_content) sample_obj = json_ref.resolve_refs( sample_obj, base_path=os.path.abspath(self.SAMPLE_ROOT)) 
sample_content = jsonutils.dumps(sample_obj, sort_keys=True, indent=4, separators=(',', ': ')) event_type = sample_file[0: -5] html_str = self.TOGGLE_SCRIPT % ((event_type, ) * 3) html_str += ("" % event_type) html_str += ("
%s
" % (event_type, sample_content)) raw = nodes.raw('', html_str, format="html") col.append(raw) return content def setup(app): app.add_directive('versioned_notifications', VersionedNotificationDirective) ././@PaxHeader0000000000000000000000000000003200000000000010210 xustar0026 mtime=1751923813.52371 masakari-19.1.0.dev18/doc/notification_samples/0000775000175100017510000000000015033036146020410 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/notification_samples/create-host-end.json0000664000175100017510000000273215033036143024266 0ustar00mylesmyles{ "event_type": "host.create.end", "timestamp": "2018-11-27 13:09:30.737034", "payload": { "masakari_object.name": "HostApiPayload", "masakari_object.data": { "reserved": false, "uuid": "d6a2d900-1977-48fd-aa52-ad7a41fc068b", "on_maintenance": false, "control_attributes": "TEST", "name": "fake-mini", "failover_segment": { "masakari_object.name": "FailoverSegment", "masakari_object.data": { "uuid": "89597691-bebd-4860-a93e-1b6e9de34b9e", "deleted": false, "created_at": "2018-11-27T09:26:30Z", "recovery_method": "auto", "updated_at": "2018-11-27T09:54:50Z", "name": "test", "service_type": "compute", "deleted_at": null, "id": 877, "description": null }, "masakari_object.version": "1.0", "masakari_object.namespace": "masakari" }, "fault": null, "type": "COMPUTE", "id": 70, "failover_segment_id": "89597691-bebd-4860-a93e-1b6e9de34b9e" }, "masakari_object.version": "1.0", "masakari_object.namespace": "masakari" }, "priority": "INFO", "publisher_id": "masakari-api:fake-mini", "message_id": "e437834a-73e1-4c47-939a-83f6aca2e7ac" }././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/notification_samples/create-host-start.json0000664000175100017510000000261215033036143024652 0ustar00mylesmyles{ "event_type": "host.create.start", "timestamp": "2018-11-27 13:09:30.716747", 
"payload": { "masakari_object.name": "HostApiPayload", "masakari_object.data": { "reserved": false, "name": "fake-mini", "on_maintenance": false, "control_attributes": "TEST", "failover_segment": { "masakari_object.name": "FailoverSegment", "masakari_object.data": { "uuid": "89597691-bebd-4860-a93e-1b6e9de34b9e", "deleted": false, "created_at": "2018-11-27T09:26:30Z", "recovery_method": "auto", "updated_at": "2018-11-27T09:54:50Z", "name": "test", "service_type": "compute", "deleted_at": null, "id": 877, "description": null }, "masakari_object.version": "1.0", "masakari_object.namespace": "masakari" }, "fault": null, "type": "COMPUTE", "failover_segment_id": "89597691-bebd-4860-a93e-1b6e9de34b9e" }, "masakari_object.version": "1.0", "masakari_object.namespace": "masakari" }, "priority": "INFO", "publisher_id": "masakari-api:fake-mini", "message_id": "0ed836cc-353a-40bc-b86b-d89e6632d838" }././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/notification_samples/create-notification-end.json0000664000175100017510000000143515033036143025776 0ustar00mylesmyles{ "event_type": "notification.create.end", "timestamp": "2018-11-27 13:46:25.496514", "payload": { "masakari_object.name": "NotificationApiPayload", "masakari_object.data": { "notification_uuid": "e6b1996f-7792-4a65-83c3-23f2d4721eb0", "status": "new", "source_host_uuid": "d4ffe3a4-b2a8-41f3-a2b0-bae3b06fc1a3", "fault": null, "id": 1, "generated_time": "2017-06-13T15:34:55Z", "type": "VM", "payload": {"process_name": "nova-compute"} }, "masakari_object.version": "1.0", "masakari_object.namespace": "masakari" }, "priority": "INFO", "publisher_id": "masakari-api:fake-mini", "message_id": "500447b9-4797-4090-9189-b56bc3521b75" }././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/notification_samples/create-notification-start.json0000664000175100017510000000130115033036143026355 
0ustar00mylesmyles{ "event_type": "notification.create.start", "timestamp": "2018-11-27 13:46:23.060352", "payload": { "masakari_object.name": "NotificationApiPayload", "masakari_object.data": { "status": "new", "source_host_uuid": "d4ffe3a4-b2a8-41f3-a2b0-bae3b06fc1a3", "fault": null, "generated_time": "2017-06-13T15:34:55Z", "type": "VM", "payload": {"process_name": "nova-compute"} }, "masakari_object.version": "1.0", "masakari_object.namespace": "masakari" }, "priority": "INFO", "publisher_id": "masakari-api:fake-mini", "message_id": "5e2e4699-0bbd-4583-b1e2-a87c458f84eb" }././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/notification_samples/create-segment-end.json0000664000175100017510000000124515033036143024751 0ustar00mylesmyles{ "event_type": "segment.create.end", "timestamp": "2018-11-22 09:25:12.813483", "payload": { "masakari_object.name": "SegmentApiPayload", "masakari_object.data": { "description": null, "fault": null, "recovery_method": "auto", "name": "test", "service_type": "compute", "id": 850, "uuid": "5cce639c-da08-4e78-b615-66c88aa49d50" }, "masakari_object.version": "1.0", "masakari_object.namespace": "masakari" }, "priority": "INFO", "publisher_id": "masakari-api:fake-mini", "message_id": "b8478b31-5943-4495-8867-e8291655f660" }././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/notification_samples/create-segment-start.json0000664000175100017510000000107615033036143025342 0ustar00mylesmyles{ "event_type": "segment.create.start", "timestamp": "2018-11-22 09:25:12.393979", "payload": { "masakari_object.name": "SegmentApiPayload", "masakari_object.data": { "service_type": "compute", "fault": null, "recovery_method": "auto", "description": null, "name": "test" }, "masakari_object.version": "1.0", "masakari_object.namespace": "masakari" }, "publisher_id": "masakari-api:fake-mini", "message_id": 
"e44cb15b-dcba-409e-b0e1-9ee103b9a168" } ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/notification_samples/delete-host-end.json0000664000175100017510000000273015033036143024263 0ustar00mylesmyles{ "event_type": "host.delete.end", "timestamp": "2018-11-27 13:35:09.882636", "payload": { "masakari_object.name": "HostApiPayload", "masakari_object.data": { "reserved": false, "uuid": "3d8d1751-9cab-4a48-8801-96f102200077", "on_maintenance": false, "control_attributes": "TEST", "name": "fake-mini", "failover_segment": { "masakari_object.name": "FailoverSegment", "masakari_object.data": { "uuid": "89597691-bebd-4860-a93e-1b6e9de34b9e", "deleted": false, "created_at": "2018-11-27T09:26:30Z", "recovery_method": "auto", "updated_at": "2018-11-27T09:54:50Z", "name": "test", "service_type": "compute", "deleted_at": null, "id": 877, "description": null }, "masakari_object.version": "1.0", "masakari_object.namespace": "masakari" }, "fault": null, "type": "COMPUTE", "failover_segment_id": "89597691-bebd-4860-a93e-1b6e9de34b9e" }, "masakari_object.version": "1.0", "masakari_object.namespace": "masakari" }, "priority": "INFO", "publisher_id": "masakari-api:fake-mini", "message_id": "64d61bcf-c875-41c3-b795-19a076f6de96" }././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/notification_samples/delete-host-start.json0000664000175100017510000000276015033036143024655 0ustar00mylesmyles{ "event_type": "host.delete.start", "timestamp": "2018-11-27 13:31:47.451466", "payload": { "masakari_object.name": "HostApiPayload", "masakari_object.data": { "reserved": false, "uuid": "3d8d1751-9cab-4a48-8801-96f102200077", "on_maintenance": false, "control_attributes": "TEST", "name": "fake-mini", "failover_segment": { "masakari_object.name": "FailoverSegment", "masakari_object.data": { "uuid": "89597691-bebd-4860-a93e-1b6e9de34b9e", "deleted": false, 
"created_at": "2018-11-27T09:26:30Z", "recovery_method": "auto", "updated_at": "2018-11-27T09:54:50Z", "name": "test", "service_type": "compute", "deleted_at": null, "id": 877, "description": null }, "masakari_object.version": "1.0", "masakari_object.namespace": "masakari" }, "fault": null, "type": "COMPUTE", "id": 71, "failover_segment_id": "89597691-bebd-4860-a93e-1b6e9de34b9e" }, "masakari_object.version": "1.0", "masakari_object.namespace": "masakari" }, "priority": "INFO", "publisher_id": "masakari-api:fake-mini", "message_id": "b5914f94-99dd-42fa-aaf3-3cedacda6b67" }././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/notification_samples/delete-segment-end.json0000664000175100017510000000121715033036143024747 0ustar00mylesmyles{ "event_type": "segment.delete.end", "timestamp": "2018-11-27 14:36:07.457369", "payload": { "masakari_object.name": "SegmentApiPayload", "masakari_object.data": { "description": null, "fault": null, "recovery_method": "auto", "uuid": "89597691-bebd-4860-a93e-1b6e9de34b9e", "service_type": "compute", "name": "test2" }, "masakari_object.version": "1.0", "masakari_object.namespace": "masakari" }, "priority": "INFO", "publisher_id": "masakari-api:fake-mini", "message_id": "00184d05-7a96-4021-b44e-03912a6c0b0d" }././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/notification_samples/delete-segment-start.json0000664000175100017510000000125015033036143025333 0ustar00mylesmyles{ "event_type": "segment.delete.start", "timestamp": "2018-11-27 14:36:07.442538", "payload": { "masakari_object.name": "SegmentApiPayload", "masakari_object.data": { "description": null, "fault": null, "recovery_method": "auto", "name": "test2", "service_type": "compute", "id": 877, "uuid": "89597691-bebd-4860-a93e-1b6e9de34b9e" }, "masakari_object.version": "1.0", "masakari_object.namespace": "masakari" }, "priority": "INFO", 
"publisher_id": "masakari-api:fake-mini", "message_id": "e6c32ecb-eacc-433d-ba8c-6390ea3da6d2" }././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/notification_samples/error-exception.json0000664000175100017510000000234315033036143024427 0ustar00mylesmyles{ "event_type": "segment.create.error", "timestamp": "2018-11-28 14:24:27.902437", "payload": { "masakari_object.name": "SegmentApiPayload", "masakari_object.data": { "service_type": "compute", "fault": { "masakari_object.name": "ExceptionPayload", "masakari_object.data": { "module_name": "pymysql.err", "exception": "DBError", "traceback": "Traceback (most recent call last):\n File \"/opt/stack/masakari/masakari/ha/api.py\", line ...", "exception_message": "(pymysql.err.Internal Error) (1054, u\"Unknown column 'name' in 'field list'\" ...", "function_name": "raise_mysql_exception" }, "masakari_object.version": "1.0", "masakari_object.namespace": "masakari" }, "recovery_method": "auto", "description": null, "name": "testT6" }, "masakari_object.version": "1.0", "masakari_object.namespace": "masakari" }, "priority": "ERROR", "publisher_id": "masakari-api:fake-mini", "message_id": "e5405591-1d19-4a8c-aa92-4d551165d863" }././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/notification_samples/process-notification-end.json0000664000175100017510000000144615033036143026213 0ustar00mylesmyles{ "event_type": "notification.process.end", "timestamp": "2018-12-20 05:26:05.075917", "payload": { "masakari_object.name": "NotificationApiPayload", "masakari_object.data": { "notification_uuid": "15505a8c-8856-4f3d-9747-55b6e899c0f5", "status": "ignored", "source_host_uuid": "6bfaf80d-7592-4ea8-ad12-60d45476d056", "fault": null, "id": 47, "generated_time": "2017-06-13T15:34:55Z", "type": "VM", "payload": {"process_name": "nova-compute"} }, "masakari_object.version": "1.0", 
"masakari_object.namespace": "masakari" }, "priority": "INFO", "publisher_id": "masakari-engine:fake-mini", "message_id": "c081eb25-7450-4fa2-bb19-ae6d4466e14e" }././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/notification_samples/process-notification-error.json0000664000175100017510000000266415033036143026601 0ustar00mylesmyles{ "event_type": "notification.process.error", "timestamp": "2018-12-20 06:21:19.315761", "payload": { "masakari_object.name": "NotificationApiPayload", "masakari_object.data": { "notification_uuid": "0adb94e0-8283-4702-9793-186d4ed914e8", "status": "running", "source_host_uuid": "6bfaf80d-7592-4ea8-ad12-60d45476d056", "fault": { "masakari_object.name": "ExceptionPayload", "masakari_object.data": { "module_name": "masakari.engine.manager", "exception": "str", "traceback": "Traceback (most recent call last):\n File \"/opt/stack/masakari/masakari/engine/manager.py\", line ...", "exception_message": "Failed to execute process recovery workflow.", "function_name": "_handle_notification_type_process" }, "masakari_object.version": "1.0", "masakari_object.namespace": "masakari" }, "id": 51, "generated_time": "2017-06-13T15:34:55Z", "type": "PROCESS", "payload": {"process_name": "nova-compute"} }, "masakari_object.version": "1.0", "masakari_object.namespace": "masakari" }, "priority": "ERROR", "publisher_id": "masakari-engine:fake-mini", "message_id": "5f3c9705-b3fb-41f9-a4e0-4868db93178c" }././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/notification_samples/process-notification-start.json0000664000175100017510000000145115033036143026576 0ustar00mylesmyles{ "event_type": "notification.process.start", "timestamp": "2018-12-20 05:26:05.002421", "payload": { "masakari_object.name": "NotificationApiPayload", "masakari_object.data": { "notification_uuid": "15505a8c-8856-4f3d-9747-55b6e899c0f5", "status": "new", 
"source_host_uuid": "6bfaf80d-7592-4ea8-ad12-60d45476d056", "fault": null, "id": 47, "generated_time": "2017-06-13T15:34:55Z", "type": "VM", "payload": {"process_name": "nova-compute"} }, "masakari_object.version": "1.0", "masakari_object.namespace": "masakari" }, "priority": "INFO", "publisher_id": "masakari-engine:fake-mini", "message_id": "285be756-ac29-4b78-9e2b-9756f5077012" }././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/notification_samples/update-host-end.json0000664000175100017510000000275615033036143024313 0ustar00mylesmyles{ "event_type": "host.update.end", "timestamp": "2018-11-27 13:13:25.361394", "payload": { "masakari_object.name": "HostApiPayload", "masakari_object.data": { "reserved": false, "uuid": "d6a2d900-1977-48fd-aa52-ad7a41fc068b", "on_maintenance": false, "control_attributes": "TEST", "name": "fake-mini", "failover_segment": { "masakari_object.name": "FailoverSegment", "masakari_object.data": { "uuid": "89597691-bebd-4860-a93e-1b6e9de34b9e", "deleted": false, "created_at": "2018-11-27T09:26:30Z", "recovery_method": "auto", "updated_at": "2018-11-27T09:54:50Z", "name": "test", "service_type": "compute", "deleted_at": null, "id": 877, "description": null }, "masakari_object.version": "1.0", "masakari_object.namespace": "masakari" }, "fault": null, "type": "COMPUTE", "id": 70, "failover_segment_id": "89597691-bebd-4860-a93e-1b6e9de34b9e" }, "masakari_object.version": "1.0", "masakari_object.namespace": "masakari" }, "priority": "INFO", "publisher_id": "masakari-api:fake-mini", "message_id": "e7f85d49-7d02-4713-b90b-433f8e447558" }././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/notification_samples/update-host-start.json0000664000175100017510000000273415033036143024676 0ustar00mylesmyles{ "event_type": "host.update.start", "timestamp": "2018-11-27 13:13:25.298007", "payload": { "masakari_object.name": 
"HostApiPayload", "masakari_object.data": { "reserved": false, "uuid": "d6a2d900-1977-48fd-aa52-ad7a41fc068b", "on_maintenance": false, "control_attributes": "TEST", "name": "fake-mini", "failover_segment": { "masakari_object.name": "FailoverSegment", "masakari_object.data": { "uuid": "89597691-bebd-4860-a93e-1b6e9de34b9e", "deleted": false, "created_at": "2018-11-27T09:26:30Z", "recovery_method": "auto", "updated_at": "2018-11-27T09:54:50Z", "name": "test", "service_type": "compute", "deleted_at": null, "id": 877, "description": null }, "masakari_object.version": "1.0", "masakari_object.namespace": "masakari" }, "fault": null, "type": "COMPUTE", "id": 70, "failover_segment_id": "89597691-bebd-4860-a93e-1b6e9de34b9e" }, "masakari_object.version": "1.0", "masakari_object.namespace": "masakari" }, "priority": "INFO", "publisher_id": "masakari-api:fake-mini", "message_id": "d1a3ae84-7f41-4884-bc3f-fa34c7cd1424" }././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/notification_samples/update-segment-end.json0000664000175100017510000000124615033036143024771 0ustar00mylesmyles{ "event_type": "segment.update.end", "timestamp": "2018-11-27 14:32:20.417745", "payload": { "masakari_object.name": "SegmentApiPayload", "masakari_object.data": { "description": null, "fault": null, "recovery_method": "auto", "name": "test2", "service_type": "compute", "id": 877, "uuid": "89597691-bebd-4860-a93e-1b6e9de34b9e" }, "masakari_object.version": "1.0", "masakari_object.namespace": "masakari" }, "priority": "INFO", "publisher_id": "masakari-api:fake-mini", "message_id": "3fbe50a5-9175-4161-85f0-e502f9024657" }././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/notification_samples/update-segment-start.json0000664000175100017510000000124715033036143025361 0ustar00mylesmyles{ "event_type": "segment.update.start", "timestamp": "2018-11-27 14:32:20.396940", 
"payload": { "masakari_object.name": "SegmentApiPayload", "masakari_object.data": { "description": null, "fault": null, "recovery_method": "auto", "name": "test", "service_type": "compute", "id": 877, "uuid": "89597691-bebd-4860-a93e-1b6e9de34b9e" }, "masakari_object.version": "1.0", "masakari_object.namespace": "masakari" }, "priority": "INFO", "publisher_id": "masakari-api:fake-mini", "message_id": "e6322900-025d-4dd6-a3a1-3e0e1e9badeb" }././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/requirements.txt0000664000175100017510000000067015033036143017462 0ustar00mylesmyles# The order of packages is significant, because pip processes them in the order # of appearance. Changing the order has an impact on the overall integration # process, which may cause wedges in the gate later. sphinx>=2.0.0,!=2.1.0 # BSD openstackdocstheme>=2.2.1 # Apache-2.0 os-api-ref>=1.4.0 # Apache-2.0 sphinxcontrib-apidoc>=0.2.0 # BSD # releasenotes reno>=3.1.0 # Apache-2.0 # PDF Docs sphinxcontrib-svg2pdfconverter>=0.1.0 # BSD ././@PaxHeader0000000000000000000000000000003200000000000010210 xustar0026 mtime=1751923813.52371 masakari-19.1.0.dev18/doc/source/0000775000175100017510000000000015033036146015476 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000003200000000000010210 xustar0026 mtime=1751923813.52371 masakari-19.1.0.dev18/doc/source/_static/0000775000175100017510000000000015033036146017124 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/source/_static/Masakari_spec_process.svg0000664000175100017510000013253515033036143024153 0ustar00mylesmyles launchpad create a bug create a blueprint create a blueprint End states out of scope code merged bug fix? idea REST API change? submit spec for review a feature? spec merged blueprint approved for release spec required? 
add link on masakari meeting agenda blueprint hit by feature freeze re-submit for next release blueprint unapproved apply procedural -2 upload code for review remove procedural -2 review blueprint in masakari meeting no yes ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/source/_static/architecture.png0000664000175100017510000015515415033036143022324 0ustar00mylesmylesPNG  IHDR*sRGBgAMA a pHYsodIDATx^wSU?pt,ǵSwQПeޥ* *"Ә{}3$S2I&m>?9&9o9DDDDDDADDDDDADDDDDADDDDDADDDDDADDDDDADDDDDADDDDDK/=cqK.?񣨨h=t*.?/^RfϞ#""""sႥ-\1~K'tRCC?w1w\`X!""""|GGG[X!""""*}@µ⇫> DDDDDvZcɒ%z1jԨzW8nM̙3P(**:{D?** H G\(~,^K"""""۹J0ԇK> DDDDD6r!_x%J"""""D҇׻dCBDDDDd ᚥ-?ܥ!X!""""ㇻ> DDDDDVsrp҇`:NI'u?LJ> V~'"""Ǚä.e@hÆ j wy>}8U~7֬Y!?K:}bj.4{Э_^ѡp<=NXj䫯:|j"""""pZ0/}nsn-H9sCTSW 0`ɒ%\'~8'~YPG@?8'~{C`mRQ QVV6dȐc666J nL4iȑqqqa[ϟ/5kVKKZ}:w޾:n`8ڑp㏗qlmG5|p}XY-0~Ȼd+ d~3f455-[۷/Vm Jٿg}r۸+R܋OypJDDD ?.5g?7|}.@pCCՏM6acʕ[~ĉQ I${7a4^uU:Pm DJ_'⇞=֮]+/rk})V;ՓVXfMHH%ijpw J>w\6p@chWRXxވЂ6"w!qƌ';nĈiii:gq iu0OMM6 9nH #I&<$!;3Yw}W娩 %h)n,ZHOz*xpWƚEoZK~k?e.ZV ?xٮ؃L5j Y ƍ`sXu &="~`.\#V#""[Oȁ|z,W^)__;xf̘1k,<3ta;)뮻]Wq`ԩJK?b.}LMlذAVٿ?/b?qDm#$``C QM@ټΟ?hTg8~a7&t,+0+0Aw 6>q 8#GVX_ꁄ,aR2M]J 3]u7$HۮfiWCC  {п~ :ol* ,}U̙3G-y猥\(~DEE{pHaaAԑs̻QWPn5 ,}εGtt @X """" ׊>⇫@X """"R>IS"""""[bcEADDDDd#&W\4~>: 䦛nSGFDDDDDVqADDDDD񃈈񃈈񃈈񃈈񃈈񃈈񃈈񃈈񃈈񃈈񃈈񃈈񃈈񃈈9񣰰p…Ǹ;#**J=""""""ď9sKc """".rNPsyuG@DDDDD]\[޽2~Y"DDDDDs/""""" \"~z{GDDDDDDb<""""""YDDDDDV`?ADDDDdk0~YDDDDDV`?ADDDDdk0~YDDDDDV`?ADDDDdk0~YDDDDDV`?ADDDDdk0~YDDDDDV`?ADDDDdk0~YDDDDDV`F?c>}|}}DDDDD]a ďkĈh&Mu222Tkwڼy3NVZTv?IV^ݯ_?TMDDDDd?p@0` 6֣I ÇOOOWݩ믿ݮY7xo߾_~ÇU5?裏N0aܸqMMMjA+)}Ԍ1­pXXd{ 8pun6""""r pd ōYfh޼y:Νk?ʆ "¶---jF$KqUm o;sL&CVƽ-Ç7n,۷/6+Ȯl"ѪUHϚ;o35"ϛ6n4?|g m +ԟ.KCC,;S?ӧ9k82~m6rH%``)f&C@1C_~zYx`1dY*|s=`s}6Z&=fY+0y cǎm~ ۆ <1Ƙ!N8ᄤ$[߿_r#I/RUW]ercƌ9k`ҩ!cӦM?rJIƍ &MBz0i]ܲeKdMװ!C&[CC C}g9t萼jѢExچO!+˧юo>psԩRVF2#\lADrkIѹ0Ĭ"//SRoo>sLQ]FZCW_*^=$k͝;W@TUVгt<~̟?ͽ̹볰з1ƀaL&&#} }ڵkwAƥz1_*$- GaU,&~2"T'CIEBRW\?/XYoE9f̘JlXa;!!7Dd""05|̭H+i|'06JKK Fh4iWX^{?뛘y = 
><55-ϰaF!Dg}V`:Z$p}Dž8"C83kjjd&Xyȑ)))o]xyycY6%o< wމ=`w,Wۧ?F'< ~EFUW].\_ hYf '(#ilAD*慘l555QaI?d+'Nl8##C5i05k+La@?ɓ'coiiiT`oތ 2a|СCcot۸)S;V;v`Q10kF+lj+¯m)hF 0ϕW^CI#miH@"""LNg '`E_"SZ,PXXpB̟??44Tu2""*ؐ=jjj(@f̘Y#&&L8ai7n̳1'?`cVR'.{ t4۟k"HIZYYLR8l}OwtL>!p0JHbWa̘1UUUj_]+O?t=~DFF"-Z(??ӸN֊jڀNFD\"~`9:f꘯c(~W鵷ď^z ALOō7b5ď䂂L1ѥ+q2 N;Mp'N,7GW\`|&_&-!ٳlcE_ ,Yog/o3ϩcY6 e)_ވk֬џgDWv/D`ZO#ICJ5u?p{N: -{N3!ߴ+)KmWڗF!v+7n-^Ǐ?~3O”xOBddñ1+"TDd"$.1a>VV";RDnn.f=c0E>>---//Zm)PBs#Zj/jA,\͜vHΤf|dF5ـ< N K Tgee!$&&&%%!x䠱 "0jƧQMPhT ?ȓp rECCCUUUII|~"xj@lADQԌO,6Ш&"0~'@FD.5Ө& 4DI8K4jj"ydDDR3>j@lADQԌO,6Ш&"0~'@FD.5Ө& 4DI8K4jj"ydDDR3>j@lADQԌO,6Ш&"0~'@FD.5Ө& 4DI8K4jj"ydDDR3>j@lADQԌO,6Ш&"0~'@FD.5Ө& 4DI8K4jj"ydDDR3>j@lADQԌO,6Ш&"0~'@e-"tjƧQMPhT ?ȓp RLJ~xu׽'sS3>j@lADE6v٧Oc5GOZZZB r ncŒm۶]s5[y7nĶ}o¶/e݃)QMPhT ?ȓ8 ü\za!((( i[`9w3333sm#00GHۀ_L#u`< hoڡv]v-QNxj@lADU2Z4ۋ{ꎏ&$ ɑ ĉ=رcǾ;w[BDnD4jj"yd@@aVVVrr|GNN(|Q3>j@lADY'%+ yKJJ/OmEԌO,6Ш&"0~'@9  $ j ͆b dRDd"$Ȉ {f|r "$Ȉ@@hiiywzǪ^fmADd"$ȈCCM6ͤkll6Ш&"0~'@FD=ǰaze^ X< 2">,ӧ=FomQ__AADP=n_5 @A`< 2":(z^/44T?AAD,R/͋&M_|||MKK < 2"Arssg͚շo_ccРA=333ejj"yd]f\ C}ףG6/zlڴI/z8p 55)TWhTSxќuY?oF[nY~}]]]{5-ZҢZ=ydjhhXt9s>Z ߃AD'l^^^E__ߠ 9qEE&P@z͛7qw}wa-?c{h/!T׿1~do߶mo'"g*EN:hhQ]]Ѐs\Q{@z3fnl!/ҥK_iu饗 3I DDnYpknn^h.2kn!&&#"WsӒGdddBBԄЂe}vjF5 xG=uԡCƀ;3?^n1D~ 'y9iFAD8uWM\GO_ILqq1.= ?⒏I+&vD.GlllJJJNNNII^쁝m4jPhTSϰf3W_}u}駟TkyKƀ={AD8uW&LJ pAk + |2_Q[ڦl2mҍ1"33S*<<)d׫&oM3Cm=c۷X0/z?K jjNgsQQΝ;%WL2xA'xDZAD8uÇ744`jʦM&NOh@;i$ipRmoA02^ `s=ְ+rq8g\X^ѣY/z}1~tERRe$<+^< ;w?q""7*L˕$@ Zmc\1MAիWz n qoV1sLc@{1&K,a5vFi#re8ʙ"f~x{FDDHѣvmmyC'{jj^u)#@?.«*-:sgDD䶜?b˒:$*{$ ˛jm.`j?UMǜƤÆ [reUUc2\.oB s9r$&Xwa 3fԩ2c#$$$&&&99ӢNv"Ts9x{2K^qƙ|p\i4imaB-#"r7N4b天I jll qXdzs{駟nql˭\,H?@{(\G.-ɓ7k,]VV/1/ƏUVmżF E///GD߿>(G||a\^xHuGݵk׾}###i261[%?dc06|yU7o- 6l؀4i10I:%#""b̘1LʇLJ udxG+CCCѽzyV!'PMPhTE8qiӮny'Ƶ^k2(1~tp#ȑ?d؈/fTm]U4559`},:JӧOz)w^^z hWMIP[a5+$& fojU VB2T@jdWď?޸96|g79NI_GF 6O1V.T233F1SOs)))%''_ƏK1~ xcl7I 
qCDHN4h!FN`5F,5n;`},ŀ)]>l^z_|pqoשyJmWHЯ.Bd?Ll!C[O?Ih/~M6Ʉ@|WxdM"klԶ?߿'nݺ"8yDSkۛdF5y2\w^V3x:yBV^V-`7to<+NHH1\}80w@Ddο4balZhjY[GSڢ8qUyx勳fee]G ULeGSˎOJ.?uSMD6IUMwTkAAAF,}M&|B5Y@mQMj0+/B\`Gзq Gzz:HŌH}W]uULLLnnnee% D\xdjF-kZhjY[GS`!}ܑ4仳Bp#11|L̄0BD6 դ}C7R|{n ?^ F 񫀡R/b*kEDDb3WrݾX 2˜0cfO}2331c=//q'#V\{ 1Bٗ_&βl2Ky%z 0\tzQ&55FYY "ƍC D?:!S|cXAzDv SjD:$/iT^~kꫯիؿ^-Zoooi;0Fe?\5,qw}7DZ$d9;.\G'0NMM f@zXW>(==ko>\&~59wdN& 4>'B]]]4??& $!! \ /p1 "AذaI'$o "\sPfrB5Y@mQMD60 BOF''I#00+O(?~|AAy'"r^ȡ0L믿=z^SN .--tEfK4jj"y8G+++򒒒"""LBD.F"r~嗣KJJj[yj@l^) o>fQxi$"Gq%lٲsďbwW9}Q,6Ш&"t?TUUUPPgϞ/\?>C=3sHD3#)z$''ggg#ȧ]SC,6Ш&"t5~N(91 ?sꩧq&2HD6sNc#===??_YЩGQMPhT gεJďLܲe)q"`M=ȓHD|ӧ1x=0...onnֳڗKRG,6Ш&"X?gN7~}87md@O 1xi$"k`RWW/0s}=uzHdF5L/4LNN ظq13&55U?7DD݀F"&4UUUǏݻL_c:4jj"-pMJJ@k zj```EE D4Qav QSS/_fEsݱc=ݺ術&TDd XWWWVVe˖ &ܹ3##+BDݍF"2Ns=Fz뭷EiTDdę'8U5qqq%%%555ׯ_|9k D}xi$Srd}61iiioCf|dF5. $///555111!!A/xWX!nK#u #ˁbbb*++ݺS3>j@l`z(,,AςO?T ^z-[e";^0#sL`jjj!ȄO& 4v8@pz6h !liiiII/HD*,,lsځF) `_=術yPMPhT ?@NR8p斗|߱cǰaÌ dҥL Dd_4Q0?ȶm0M1yP1/AިFSSg=t2 jj"}?СC dʕL DdG4Q0HLLk))f> 1}A䐊~Nf`B5Y@mQMD6{8U-EUUGJJJTTԿ!C_|BDK#-"†9f,X:iQ=Lj@lMnii-..NOO ^~I/4WYF"?[g_|>}`ځ)|LPڛy:#PMPhT /~NWiWTT'uLȎ;0xNDK#)U:t<55/6m~~~UUU Nx dF5٠[H4;;;!!!<<1,X#߂ED㥑L))HVVVhhh||j@lpjOMM@"""nVTT;w=m4 "{ᥑ444TUUa4R[[ꇅDd@bcccbbE o>}_=삗F(33S: DryysL5z;.ԌO,6Ш&"8&~@jkkq"ugNN~馛zu͛F,"/D=f g2d 2y!N`f|dF5adhnn_QQQRRR)**Bݻ]Xw֭[&u /D=L)ѣG+@h3+#r nLj@l2\ȟ!jkk+++ˋ?@ad?su/D3o1C\Ӌ6]ȯjY#,,6Ш&"88~zCA|7K jc"H0bǎgq]pew#gES=YdF5GR]]]TTꫯ'yژ}4y,LJJJz!cO?Ʉ|1= dF5)CH(XGppҥK dĈ-'ՖDD॑a^㢋.RPPP]]#ԳQMPhT ?áC***rssHPPPT""K#iѯ_?4zyyaa yj@lzcKfff\\\hh1 0`˖-H)Oc4y\+z;>BOHH*--㛯:>j@l!cKSS 5]C ߂ED㥑Cz_SSE;w쑙fdhz5jj"H1~ $444 W^^WWWUUU^^QyoщȈF"7#=p뭷E"=Yj@ljd B@R[[h"c9=> q0uDnF"w7o=dy\ᦛnںukEy6[SOF5Y@mQMD6p2Hbdee!ls=rgi<"sxN,\;nFJ;#!r%wiljjz'FaxQ1lذ3gfffQL6o߾)7dѣ;3,TDd׌ DR HӧO7O \p!&GÐo߾W_}բE暫oWneka 1G'u?ùߥqɒ%F=֌3Tpu:t馛~oo0=K=dF5`l Ѧ"777111<<[ouGM[{OBw!qbܸqM4I?/AeϞ=x,.I'D=fX#nfƢ".~c`S Kgׯ:Gzj\&e,zȿ3džrQ.Z& 4.? 
ڒ 达& d̙:rP:2nudݚ40~[CѣcSNfK/-5vر~kV3fP6l: :+Ϛ5kٲe.yBQϔF=DfF>Hs *466~}Q^צM.yyy,zؑz5YLmQ;"3p 1 6& /0!!1OG&ׇ#l{ߑ4^8s.͛'⭷ޒ,~8~W)/r= rA VZ@(OdH~::zFZz׮]« uw4K't&d<zK/}\qQuWٳgϾER\WlHуW {?~t6-`l@bcc1Ryyy%&&b/ʺc>r{=1a„4 j<2 ^'x}L=_˖- ً\MJ><ԦVA&zH\Aު/{cǪf#"K >q;..SXď˗aWv{;fϞmL]w݋/^eCM{#'v?8n)3繰/('cd57X sL  2G',=ydz?<\tKldR__?_(bE-fŲ8qExxYĘ] WX@ܫchq{#pmn_Mz?+\/MLLtvΝm& XdاNskb|SL 1yzSSSĶ2"K@C[@YSS5n8tɓ'㒌nMPMD6p񚛛1w1tM XA" Ȑ=K'x ~]w駟TTT`LKNNްa< z\c&%%!(祊K1-ȅy üov 999&"}R&"x@AO )))QQQ۷o7&{ڒwhii)Rl֭[wvi.'|տ[ff&IQQBg}&}ذa`6wO>$;;[XgedXas\y=999~"~_)T < ~>ɟ]<( _y֣>*ߙkɟa0#O?pCx{{曳f2"bȐ!$v /Im|||0,=OXXBK/twO:NRO"r["u߿?։/..ƓϿu\w c,̴|\$C DdψaE :J ٵkıbV(--O0XqdyrrrrdC.55uͯڃ>xib &3`./J> ~.Qoߎc˗haƌ }N<{cܸqKV?g @Y@П1|2VTTTPPRVV~;B5cH^^^JJ !/G~~>?6GB4~0bD'|l$i$~>+E]rW"n&c]t*tO2I QjӦMaC-?!vd,}\èEWYCB z$R`w_j"'FH$dDt4i&p}oa<|< dcؤM^^^x'^zܝ83-plCѾ7nر#33S=*3Yمd,}0ZKq{Y*!r H#@PMD6z*)))((@@vΜ9 oVRLh zjqq1JJJ''++KMm]YI?FUVIЅ!T#zT6c pŁꪘ.=i R߶@(U+ DdϋFbB***1xw"JRńZ@nKDdcT1B {yy;VB1{u?Piiik d X 8K%K <#XFO^xj"xQz H yoÇR${W^t 7$%%ʯH }sd#X `tVwk'_lR7nܺupA_sTgҨNA0$ ď 6 6̘@/^e8tИh1~]@)cK0]ט=[=إ=D@u2ڴ4AAAk֬1I O>dXX^~ìB݇µ2>YE}رc?cooo\>'?~ DvuI'Nq00bxĠ]QQW_no߾kRdɒB݇…Kd;@:(zy;v onnf vލ:`+dj1`fee%$$ dׯкh"/M^{-z?.\(~A8"DDDM#Fx뭷p >x GIIIMMM٫p"tIa<}XdtEm6K%vik֬  Zn!Cx{{4Nd?X {F۷~{СzZ7t֭[}||pqqqʐqAņ؜z8_gR!icuǥ}g0`F߾}'OO"o c" ?p"ԝ?.\%~A ."TL6M]aĈoqEu1***)))++ seee}}=DgΈꂂ$*u)=^w~tI#q9ѣo+Vܹ>8ˍ6d.?1AbCR6lI_~A955ƖD ˊl㩔Y^^/Kelʔ)ƾ!"Qo߾:W^ypV?Gzꩧ~zڵ4t|` ԖΝ=,]EbمKď{c#,}0Z^=+E\0e=@@>OOO&@n/--ũcGi… O;4cF|||RRfgupG.CÓ'O^%8қo){ .Dv`apݑg{S ™n1d}綾^¥ "D˗d)Ad/E1D`@9;;;===99r ؿݻo߾u͛7K/ zpH|瑝qvAΙnM 5551qU5rH\rrr˱+&N(Kw^__ߠH,zY:)V ȕ!r0bŸoI/B8"L%8ի3#~U^ygk⪤~.}"Vg` pZ`äةq X]VVV]]]UUvyq]tE6l U &=lAëBC0bp.//G)((IOOOJJ ܻw"d %8?$<ֶr7|3&5!Af2 gΜZiäNuȭ륏#F`H J\ےׯ_u222X YBV>҂(h,#`:"(** 3cg<~|DzygIسg#<"N>d\:wc pN5fРALb"6@N}w x0nݺ5666333,++MOO(555++KXĢ?:2F`@?op!HǏYfɻڳk.hwܩ1~ ,|߾}Ny: '>"##n޼yIIIyyyaJYCCCuu|ұ?3!honnfуFd; !---555u<>s}'Nc c'F֭;Seʥ6s8 G>ǚsN4+Kp{[us5jDd- =~`#3u${Z /ȚcǎEP0?fΜilx\׿p: 9櫯^8Kx1os@W&޽{'$$ ~`„ʾJd; 
tIV-p4=~HxPmeӧzhxh\rɑy1K,^مCKx`UUU^z CBhlld  t+Z̞=[7o^{t7 >0ihhcOL3a„{7(( SFe"` h3~`x+Wj5s뭷jÇ 4-?"""TѦN*?Kb p\` A)͚󓓓1t"_~'K䍐 $H;>>>555//\)ـrǹ#X`ĉO;4^燀l7|>}|g; مKR\';; $...&&[n ^X V@ʅ7_ ?.ڌ#8ΌB~Ս=zѢE!!!m$RO?TL,d,} ӝD;wwq!3׼< XT &IDAv!ûI{? "`Ͼz`m= :Tms>}L6NAv9 zCKKKcc#:Eee%EyyyϠr-` G*1{:y~.ڂvͽ``NK\lr"%*-/G̐JdGd$*&C}?.=~AN. 0hyzdƌIeM# "}d;tc t<z!9 SMD݃<~ț0~]to`\z2s̆EC>&A݇KbbbX!r$6G``|V E7>OBHdNAv]r,9E8u=K3Y!r6ㇿ?un,}`a?.#99Yp_!Pwضm[߾}q2~%~A"` n4gt#GE!ڰaÅ^'?y-?X wc0~]})--ҤIQ/^{9lxN:i8yJֱ`z) DAv!uyyyfffTT|Ayꫯ~饗6m(U[n}嗧O~'ʤo߾s###322%JVs` DAvN!*///!!!88xݛ7o~衇ƍ' ?1|7|#<l߾G=ӳg>3$rsx),\믿޹s'qqq999x?:v,}B_e1~ ~ϔfdd8p 00p޽O?_>cW^yaô/?7|LslܸIlɒ%sτ 0䙑g c߾}`)2ɏ?믿ڵ/***-- *&x#YǞrw,u7tꢢ"$P__={ر-[lڴ{Gg͚u9wo $ӦM.\ML?ؿ?YfyOdN3f8pC뮻Nݽ'@?☑1pz㒤GcݻwE C8p@bb"PܢrMMMx=FfWyO}[f0~ݡj.FR̞%ddd&%%!b-nI0X9 蘩K8]'zD~ݺu7?t۰a÷~"N I*য়~‘%`qHVx{#FG`&y#99iM"GAAG*$:: x9#'~<?An X5k DAG!R 7:Uuu5&xFrrrH0p%`j.3uL%`o>L"W_}5fA+[.C@~ %$N !Q@GcJ1(3$l !I3& ϛ<ӓlgy@ u7m|$DfL*>5fjcWXXLY8%cj.=SvL%WN0ҥK̙3k֬3fL>k޽U>Z߾}?_YA FH;K@ I!!!k$ D 1$`q8~< x\]yyyxxx6+>bÎ;~7Ν;xJ"s֭[ߏ,+(^#A$NNAB0-p<8*lj1q8XHxMz %[z6KL jY 3r>>H)eee8${I PwIY,}PO^1~BD{$v0nܹ3777==cǎC LIN:uݑiii55773Y?X";b 7cn//'&~A= z]w?-DGGhS//KwK{= {?B@FX|ď~_+VVV666SL,8t{nȐ!555999rĉ?Cooo\-233:#/Yd̙| 9pV(--Eۮ?X "1~B*@BXbʼn'(So+**c"""‚*"##SSSsssBee%Juu5ngddaش4d,jjjۮ?X̤QW1~ BCzѣ333?%p9E'ؼA>\^^@Ap޼pu-~A=,و\ :$駟޻wo-w_DyW3221C9usrr$x W477hтp =+"х D6b ׁo9s̓/^PAE!5 ے@|!{mWD "\ZX!999Ç6۹sghhh\\ Y)B Z"c=V' ]pu"#K=>"[0~IH@x@x'(oR)t H<~5҂~nsDddixX \EX!9őLx&### `~IQ7000<<<>>>==Jd nFZ&QdQwdzA$X!9hČO.,,D@#,,,&&R1{k"E* >p%c:0H!]?_xtB'Hh/++>SDvy҇\"`:xc͞=#'']#!!#//  "<~HCNW>F,Y"O__o0V"@A &Cu/E]'/񣼼RSQQ DIC="[|ʘ9zh_ HLLE@o?~HM:|Pkך @SL|W}ȱgu}]\U+9 D]Aނ ?vEE†mO^jhh *++O<3<a,}tIw#sʳ}šoZɁ=X!t>OݻSO=JMM :Rbb ^~e___?MPPPDDD||" :.!މ۴?LJjk/~|'22dHBB?oVǽ,,u =믿>s3pq]^^^XXqtlLo*++ %{`WoDn0)}Ig׷_~hϝ;wʨ$\+@ard tyĉxĎ^>(EAAAFFFbb""Glll||<:[VVb IMM s??ay10uxw޽jժ3gޠYbŦM0Ś{LjZͼ{7tŋqD d3fKKKCz_y.˗E`l5?x O?}7b< r{_t)lr~~~x8O<1po`:=^{~ӭQTT^ݻ7 D` W߿ҤI&}EDDA~~>BGss3Fi=)ڎ?~VKC-&qs̭167$!<ӫ ׿₪ocǎķ98p@s'ï 
B'XQQbE8䘱~!K/ᎰH'n׾} m̧7dQ<'ܸ-gΜ9x6|ЎM `GV[PPV /wމG'/ٳY!jY] Y\䚢vJJJB@/*Ӕ'. D.fC-&d:uTLی y\ ks9K,y8 `g}vuut=쳲}_}]>#Fx`,1 d$xXm7.^D"+N>}on@@@hh(.RN Ⴈأ\Jcjp1vj,z߃4tPL;^}U?`$>O???wV7|IBXz/_g̘!;wҞNK/$ǚlڴ Z?0Hpx&EXaҥƻ^bͤSO=%~ѭ-[L$3F?KgΜYYYYQQN?#rt,-//]ȕ?"""@{MLLZA_\s5`J\V ,MZ yIyk Uq%_Zr䬳2Ƙ6u?̙#{۷o1Z˒~mc`q駷l޼Yx&[?4wu 6Ż+Ǽa|ߜoZ@DGc6IXdɐ!Cpb/--RDFFb hnn.zNUU.`=~?pΚ5KN{7oZAvڬSO=϶9i'&]w| /PVxW>̹sJ~\ԩSu 9W]uMވ4~pL|7zy cƌ |||c+$z^vE K.5ޝË>N:BA&pyBXbСC%x;۷oGujj* K𨬬ĥW+"7F/F{ʜ?6g'x|I)+caǎvkV T 4o6~bO?7&:7 0k.y_o __`!Amg}1#ǪU<)YBGe~W!Ev"vc-#Gh>~viړqCS ܖOcʞ?|yYYqe I.\8p@S˷rɤof?ď!A ۶mSOG;MbB,a0p9ƅ =p!~饗O?p2]M?I'I@KmmW\!\uUHڣGmaT+Wƕ544Tbܹs2~0kpj ?:Cz󟲓˗[ǵ^+Ol\\!c,)>,Qnw^# 80~S/2V]ˊ+).~~~Oڵ W=Ehhh&Kj7~d6)xm?p-/Ţg}Nw}l'%$$zcyɽQaN6ˍSNǍ7(Ǽy1,uت=,}u Fsʻk9%&2@$4"` cu_`\8p9 z7FXp+(GeeeWZI ND&h9mٲTO?/+VȢgy` 4,,L[Q***w XlH⋍'|Rd7cTܑZ`mXj$z'X d3fdgg'&&GBB&R+vqDS^>t[N4 7nDE0//>Ν;qeA ?p@JJs.+;mcdܹs?… |C 'bd$6}WoCZkg_W\?33S>$A|С¶mی+{r}Y̛{.~|hdzdra@@7| >7l؀1~אAO>dee%F$siۿu]W^ߋ/ "777)))22244W\< FTKDZi/~.}+}~؛4H°X?C=$x?Ks>:Q' D7oapc^:%%UMM U+""sxM/4̙3/.Gqq1FNN*~Cry2B[$~G@:z񳥥~;S(d1zyyS?+V?-{O߹>^2Á<rǕ4hZ|G1g:~ȳc[~;Ņ.mf"s paIOOmСYy睧e^xaxx8㇛+KF{&ꫯ޸q#zBZZ$ Myy6 W="Wɑ>ZGm2;ǯpgffb)&˖-`0b5+0!!ͫ)\<447??cт>߶m+u֌ 8!Evj_uTTj{m67dG YYYmF\?tϞ=X~d3\:~܅IZ@Dpc(hhh0R9Ri]v-(P?`js$Wzw1׬Y}.zxq=l <~@p:dhsFI-[Hcԩd;k3`Xc}:=` :؛,`?XuQ>,gGUUU~~~RRRhhOpBn.p!̌ 3ܷonYpAx9|cƌу'|rnn.^;ďFEEEGG#xRsg_"1,K>İU\p I$B0|e˖},}5> V;+**0=p)SxyyEGGgdd \ ^/7*k7x~8<<&^>())A]Z9/D=Vb5 5>q855S;v1B)~w!!!Du 0 :~pBu?6 "o$I"2ZX,}q2777>>>00^ ]wU^^٪LUՖ<ㅐ3 ~د_ >>>ȓ ux 92xt>34B9ƎchUMt-~ 3Ad#\,0 տ__ /pGDDlEI 7ܰxbu9@'tRo-[#...== +#r m^Gn(_lC=oaS wX0ZpD2P\ .H 3f۷o^;8꒒um߾? 
"&&&))IGee%d#CvMc„ {%O.`z>z9sWȮƏ:ku#..n޼ydYYYqq12F||<aaa'&&a+jԝX {@VXeٲe,}Lu\GZZߏN뮻#77%%%!u3NNܨbEEEG,}(U'9݈>!Czm<&N7dff"{H)((@@Kee%A'cR;֍ V,PGO&PMd3\/0UeVWz:{o}VE+GQ]]m<y>z8yj";92cBH:7~Fx裏p^K9Nj/~daܫzPK\'N?Xf͚/DτO?ׯtѣGe.̦l,}z5쵕jhΊxzc)))IHH y___ \=Q=Ccm۶M/')<<ZF=OCCYg%wiiiQ\M@ȃA Dd\6lp"x z @(,,Dƈ x8p 999;;_yH88xw,}^}+:BkT ?p9Ff1[Ճo_ ~dff&%%!rǧdeeFp9+`̹] X  T ?p!ƔTW}Q$b  MNNGYY|CJ^]Kd}_e<&7xIj"._y;ܵkWbb"GyobpM,}P{L o+@0a`bQ Qdڒ/嫯zڵ>>><=ssstMR/.@vAe<KdF5i!//oC ?_ve?S@|d SXY] vb2) wIOv.]tС2%o^{,BHKKC䈌OMM-..F>=~ɅA@p=/&z<7ȽA&TWШ&"?pIEZ]|QCvi?CII l$\UTT`s_y0{ >rnQ X j"WX$>`̘1SNYr%Fnn.GiiK nWVVJhP{'Y}O?'3Yr_,}94-\PmFd'ƍ3#G|"##SRR0ը2ӿQA]pgKAqF4@MA?d9sm ǓO>k.__ߐt%ox/>C\bH޽Y!`RXjKChTDW?S񇷷+\j1п CRb郬 pH,{yWX s%jڀ<;wE MII)))ill4JI+@?r/,}P{TШ& 4*z>|u\R9BCCcbb233 zP • riĨ>=+j@b?75Ȑ!X]]]__aX [l.92>[hTDd\:'#777;;;''@ C3AsHw]1+-A!TDd-\O񣢢?ab+ ?>v3YK# XQMPhTLGcccCC~|Z<Kd/YK#FC@űA!TDdc@ސY1xx &Av4BdF5?pթYmۆ)Kd/.XK#G@eA!TDdcPMX BqHd,kb,F5Y@mQMD6`9XjnX!YBzPMPhT ?z>;Z jX .QMPhT ?z>T jX I'jj"Gu*8h c\Kd9K4jj"GOu7)8h c\+ү_?,}PǤdF5ـ'`郺@7! <񊊊N>duLuj@lLJ+-u)8n Éd,̟?9I飠@- jt,6Ш&"0~x'Kh;_}'zOvJġ \,}PWQMPhT ?<?AT__o,;/8t c*-B5Y@mQMD6`l,}#B ,,}TwѨ& 4r<HssZX!ga郬 F& 4r<@0BYGj@lX gqn  x,}udF5ـSAs2@ȑK~#/i4jj"Gb郜ˉ d,#AVn#TDd9 NX!`lF5Y@mQMD6`<,}+pViy_-#>sj@lyX W32HTT=/>FhTDd@9Bݍt,6Ш&"0~x>u8䁌>,}TѨ& 4r5/8y cKd;?B5Y@mQMD6`$,}AOYq@ubc_g郬:F5Y@mQMD6`,}krp ^}U1j(>:҅j@l13gA.X!bE!j@l~7>e9 d_,}H/jj"g`\# 2[0 d >ȎT7Ҩ& 4r}+@pΜ9,X ;$TDd>@\h clٗIdF5ـݱA1X!۱A%}I& 4n„ ,}[pLĵ2@,}ݩΤQMPhT ?o @\k clٝt',6Ш&"0~ &>9r d>;QMPhT ?K䎺r d>;Hjj"bQw@\q c&KiTDd7n-@u+uTB5Y@mQMD6`pS,}d,劋G-X ;RJ,6Ш&"0~#>u_E2@r,}P~%TDdw+@@n:F5Y@mQMD6`p;&>uSu2gO BLJj=Hjj"ۙ8q) JO> r] .=Bc郺[dF5ـýꃳr_Qq遌Kݤw dF5ـýA; >Ba@u/j@lFX cd,P{X &TDۻ`X0JR@AgTJQQB*GZYaۨЩB%"f"w *.M|/?7{}gZ{?0ޟZk{}!ʏSYz%Hd4@PqW@w;,P >!GQ@*i\s5Y]Hd4@PaP >!GQ@mh}n& j)F"8Z͌R (? [  Id4@`h}> j)L"CdwP >!G@+UHOlZ3P~4@J {:x(xȀ#h}uX]k, hq>P~J'23Zh56mkZ`E7\|B! ʏК6noO Yb5J'2'ZheY $2HdYx(xȀ#h}]n eU|B! ʏtB"DRR (?{H $2Hd)x(xȀ#o<@ZtB"DOR (?r{"]h S|B! 
ʏ\9b!@"KC)C@a>`[ $2Hdx(x[m+WtK#?h}嶵B"DIR Dk!C/4kW=W^y%i/WN<d…g}5[;u]{ya\pAu?K.M:ȗ^z6nܸ =C/Uf͚.1)?C)m=dV9U60%y6c $2Hdx(xA~E? [Ŋ+kӧGk,Mע!*Bþ/&ٲeO~*Dqmdş;ŠAsBtEN5@ve $j.H~xgpRs[s{ٲew]"e^ _{A6Yitimm5;͛7[tR ,upҗ /h斷>|Lsdbz V/?ۏ:(m*~'>_WteَN=ԊwQ}]]ӓO>yיmJ7IwQ,uD=R &&F /P\~Ƕ곿˟d ⃛knaKme%:ɶ3:ɝc˖-A>z z/_/Zh=t;찃YW^uŗ>Is5@LIJϥ^zϞ={ҤI{ŵU].{W~vm6OwkU:T(.J,% I VȲP >!PD>ΝeV^xPd޼yJ.*guSN))IC-%m7@ҔG}>h$NGW z#hVѝE]nyN5>o{Xj՚5kU.^*S>餓oP"V飏>jK.raXBU]z}8GJ+(zk<;U6k̙J~k;SWiP~Ng/lE,u5HT~O}>|xsyqƅ7_ -*cǎ?(ڵkG2"G;q6{'~ 4GтFٮO~z.ZFtn"+:tjC=TKtz{Q!3kvi'*+}Hsx$Pطo_U˚^r}WiӦEl{zI۾JAM,߸(@ .?[CITjzt+Hdx(xA#g?ْ@Yh-=R%#GN:dЫ;oڥ/;=\儊ی矷E8(]3ƣ[Q~@"KC)C gqƖ-[\⺭bݯ|+=7nI駟nw|G{0_AF>LS~vivN*ѮtE˵ ̛7G{6U-Y_~(ut~VZ|KKZ+X{YGc~WUoPŢHjU6mcwׯ=:vɣ=z\ꉳdb?%_%$xb+'47zuwqᅧjΜ9Iی5jHi֬Y}3?$x(xACDO͊凬X¾ mZ$0acօqr.{!:ȏ}c珞\ޔ߶[B[PI駟nkkRShzCU/iƫBPcC͛ivuW~vF<>}vC{~/*y :so &Z~_OGq0`@=~FWM=P{Jm#*GUv*q| Wۃ#xcVȲP >!PԶ^h'ꍤo-h5^ HME]d{5j-kX~2:^~|ԩwߜ9s,XIE;#z2ejoϩ5@ʏ>چ=IIL[rV/?T]շzovt%wtرڂvWrWD(?(o}׍g|Bm!of9sܹs+ZF_Һk[~2j̘1CYCt?e}-_z-6-W\{i]ݕ\/-tq]}-t6BޮENY*r-1U*_veZ9͞=[ &:]\}>nkֲsZ9X~r)O=fݏ}'^~~6n5zh<uFwݥq1ح;z6tHd x(xAj^~mps̩X~h%rG,;JԼAjhxz/Ci-ֲ8;mG;Cƍund] 6hׯʬx Z_+x͚5SSLb=o}/C=E-Zetؚh {Dϩq=ˏ/Ɣ#!c.d!C}կk{䯽s9Ƕd,i}$jJ&m kixb{HU?H{{{~4gϞ=.ho馛'I47ވN8y睺 T(?KHI4P >!PtG!ZC-ZU 3F˵հ^{*h!Cױ;e%>kPtzc`ݟZG[7|usΟ?_U,\𩧞RPwֲk֬ޙ4i_\ҩ4{iԔ:'t1UmmmsCW;<=WDC9ľ$\yKߑ%*D;0{I(?KHdix(xAc#G`dON 9s1}N?裟ԧlD >}uQ&L8:蠞={F]Cy]wSQ(Sk/?OLm^v|hd-+׮]}ҥO?*'xB֔lG|_6mMR>њUJ6]wuر^z_/yȐ!TU :tM`[_,X`?=1J7xhwWDG5w}/I̙_EuZ~>4;n%Y֢]ˏӧ[!v<i!RsJtn-ju7v/ʎJ m95b;R^~ /YC*W$i+hW7+Qk9aG^Ǭ`<jKUo} Y&5J'j-N=T}WWc>6mZe>TvWOƊ$}UW`UW7awQ>RE7x '~vU?[n"ʅP;vm_em+]|Pņ'?1c|=ȑ#5wʕmSCHdSi|BG>Z]c}Z\x mMSRqz#A_BJ&GvvhaÆu֩X~ʉ£kM$=R{U9KzIFVy PK&]NϿƥʑEGBOⶩ!$2Hdx(xȀ&l_Fהv[Zܶ>D<J'21TlhydL4)*?hmkCHdP >!GmYظqi]h} yB D,P~ ,!@"ϬR (?ZPև V|B! ʏ<V!$2H5P >!GAK9ó>D,P~ Yf3RZB"D_P~ߤIc]vf> Y5J'2? h%sv!$2H)6P >!G!AӫICHdȒP >!G!AsUCHdȳl|B! ʏ&Vև Y|B! ʏfUև 'C)C@BM!@"KC)C@Bͧ&G y D<J'2( h5o} Y5J'2( h$x(xȀJ *H@P8$kD,P~m ,N>D<J'2(4 (nm} Y5J'2(4 (n} y dWr[B"DR (?N AAա!$2H 8P >!GBCCHdRP >!GOCHdsp|B! ʏ@9Wև ea|B! 
ʏ@yVև C)C@Mrn!@"KC)C@M򩞭!@"āR (? P=[B"DR (? }Wև 'C)C@Mrέ!@"KC)C@M!@"ǁR (?ĉȮJrGFQ:::D"DR (?ƍH5k֬>}M(sέCCHdSr|B! ʏD אև %e|B! ʏDըև gC)C@͊Q!@"C)C@͊!@"OA|B2hb4@ѣ$ԜoȀA5!$2H4l0O]2hn4@Pgm} ͞={v5! ʏFTxz!@yR<d@hnԘu! ʏW ԘC@j.!@Yf72h4@rԘC@-UNZB"n<d@"h!$21 (?Z t>D5fxȀu j"W!@y<d@RhrԘx! ʏBև<2h54@PCyk} j̲GQ1h  .!@y<d@т&MD!$21KC@-gCHdPcP~& (!@Y72hM4@En[B"d2) ,!@Ydh 4@5yn} jlذay&Cˠ.sC(?fϞnYjo߾ZMA*/ &GqD[B_4@ެY+/5 H)?a(?rMxdԩ4@PC>; T!ZBw4@P]QZBP4@PEQZBP4@@( @( (W#0hDICWAqk}@Ap( 0El}@4@TiAEl}@4@LB!ol}bzeW_ 7@ B8qbA5jTZBPHh!C,^Xx@P~uBN(? :P'@] #IENDB`././@PaxHeader0000000000000000000000000000003200000000000010210 xustar0026 mtime=1751923813.52371 masakari-19.1.0.dev18/doc/source/cli/0000775000175100017510000000000015033036146016245 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/source/cli/index.rst0000664000175100017510000000037515033036143020110 0ustar00mylesmyles========================== Masakari CLI Documentation ========================== In this section you will find information on Masakari's command line interface. .. toctree:: :maxdepth: 1 masakari-status masakari-manage openstack-masakari ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/source/cli/masakari-manage.rst0000664000175100017510000000223215033036143022011 0ustar00mylesmyles=============== masakari-manage =============== ------------------------------------- Control and manage masakari database ------------------------------------- Synopsis ======== :: masakari-manage [] Description =========== :program:`masakari-manage` controls DB by managing various admin-only aspects of masakari. 
Options ======= The standard pattern for executing a masakari-manage command is:: masakari-manage [] Run without arguments to see a list of available command categories:: masakari-manage You can also run with a category argument such as db to see a list of all commands in that category:: masakari-manage db These sections describe the available categories and arguments for masakari-manage. Masakari Database ~~~~~~~~~~~~~~~~~ ``masakari-manage db version`` Print the current main database version. ``masakari-manage db sync [--version ]`` Upgrade the main database schema up to the most recent version or ``--version`` if specified. ``masakari-manage db purge`` Deleting rows older than 30 day(s) from table hosts, failover_segments and notifications. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/source/cli/masakari-status.rst0000664000175100017510000000367615033036143022121 0ustar00mylesmyles=============== masakari-status =============== ------------------------------------------ CLI interface for Masakari status commands ------------------------------------------ Synopsis ======== :: masakari-status [] Description =========== :program:`masakari-status` is a tool that provides routines for checking the status of a Masakari deployment. Options ======= The standard pattern for executing a :program:`masakari-status` command is:: masakari-status [] Run without arguments to see a list of available command categories:: masakari-status Categories are: * ``upgrade`` Detailed descriptions are below: You can also run with a category argument such as ``upgrade`` to see a list of all commands in that category:: masakari-status upgrade These sections describe the available categories and arguments for :program:`masakari-status`. Upgrade ~~~~~~~ .. _masakari-status-checks: ``masakari-status upgrade check`` Performs a release-specific readiness check before restarting services with new code. 
For example, missing or changed configuration options, incompatible object states, or other conditions that could lead to failures while upgrading. **Return Codes** .. list-table:: :widths: 20 80 :header-rows: 1 * - Return code - Description * - 0 - All upgrade readiness checks passed successfully and there is nothing to do. * - 1 - At least one check encountered an issue and requires further investigation. This is considered a warning but the upgrade may be OK. * - 2 - There was an upgrade status check failure that needs to be investigated. This should be considered something that stops an upgrade. * - 255 - An unexpected error occurred. **History of Checks** **7.0.0 (Stein)** * Sample check to be filled in with checks as they are added in Stein. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/source/cli/openstack-masakari.rst0000664000175100017510000000035315033036143022552 0ustar00mylesmyles================== openstack masakari ================== To control and manage masakari operations, the extended `command list `_ available in openstack command. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/source/conf.py0000775000175100017510000000665115033036143017005 0ustar00mylesmyles# -*- coding: utf-8 -*- # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
import os import sys sys.path.insert(0, os.path.abspath('../..')) sys.path.insert(0, os.path.abspath('../')) # -- General configuration ---------------------------------------------------- # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = [ 'openstackdocstheme', 'sphinx.ext.autodoc', 'ext.versioned_notifications', 'oslo_config.sphinxconfiggen', 'oslo_config.sphinxext', 'oslo_policy.sphinxpolicygen', 'oslo_policy.sphinxext', ] config_generator_config_file = [ ('../../etc/masakari/masakari-config-generator.conf', '_static/masakari'), ('../../etc/masakari/masakari-customized-recovery-flow-config-generator.conf', '_static/masakari-custom-recovery-methods'), ] sample_config_basename = '_static/masakari' policy_generator_config_file = [ ('../../etc/masakari/masakari-policy-generator.conf', '_static/masakari'), ] sample_policy_basename = '_static/masakari' # autodoc generation is a bit aggressive and a nuisance when doing heavy # text edit cycles. # execute "export SPHINX_DEBUG=1" in your terminal to disable # The suffix of source filenames. source_suffix = '.rst' # The master toctree document. master_doc = 'index' # General information about the project. project = 'masakari' copyright = '2016, OpenStack Foundation' # If true, '()' will be appended to :func: etc. cross-reference text. add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). add_module_names = True # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'native' # -- Options for HTML output -------------------------------------------------- # The theme to use for HTML and HTML Help pages. Major themes that come with # Sphinx are currently 'default' and 'sphinxdoc'. 
# html_theme_path = ["."] # html_theme = '_theme' # html_static_path = ['static'] html_theme = 'openstackdocs' # openstackdocstheme optionns openstackdocs_repo_name = 'openstack/masakari' openstackdocs_bug_project = 'masakari' openstackdocs_auto_name = False # Output file base name for HTML help builder. htmlhelp_basename = '%sdoc' % project # -- Options for LaTeX output ------------------------------------------------- # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass # [howto/manual]). latex_documents = [ ('index', 'doc-%s.tex' % project, '%s Documentation' % project, 'OpenStack Foundation', 'manual'), ] # Disable usage of xindy https://bugzilla.redhat.com/show_bug.cgi?id=1643664 latex_use_xindy = False # Disable smartquotes, they don't work in latex smartquotes_excludes = {'builders': ['latex']} openstackdocs_projects = [ 'oslo.messaging', 'oslo.versionedobjects', ] ././@PaxHeader0000000000000000000000000000003200000000000010210 xustar0026 mtime=1751923813.52371 masakari-19.1.0.dev18/doc/source/configuration/0000775000175100017510000000000015033036146020345 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/source/configuration/api-paste.ini.rst0000664000175100017510000000027415033036143023540 0ustar00mylesmyles============= api-paste.ini ============= The masakari service stores its API configuration settings in the ``api-paste.ini`` file. .. literalinclude:: /../../etc/masakari/api-paste.ini ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/source/configuration/config.rst0000664000175100017510000000047015033036143022342 0ustar00mylesmyles===================== Configuration Options ===================== The following is an overview of all available configuration options in Masakari. .. 
only:: html For a sample configuration file, refer to :doc:`sample_config`. .. show-options:: :config-file: etc/masakari/masakari-config-generator.conf ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/source/configuration/index.rst0000664000175100017510000000236315033036143022207 0ustar00mylesmyles=================== Configuration Guide =================== The configuration for masakari lies in below described files. Configuration ------------- Masakari has two main config files: ``masakari.conf`` and ``recovery_workflow_sample_config.conf``. * :doc:`Config Reference `: A complete reference of all config points in masakari and what they impact. .. only:: html * :doc:`Sample Config File `: A sample config file with inline documentation. * :doc:`Recovery Config Reference `: A complete reference of all config points in masakari and what they impact. .. only:: html * :doc:`Sample recovery workflow File `: A complete reference of defining the monitoring processes. Policy ------ Masakari, like most OpenStack projects, uses a policy language to restrict permissions on REST API actions. * :doc:`Policy Reference `: A complete reference of all policy points in masakari and what they impact. * :doc:`Sample policy File `: A sample policy file with inline documentation. API configuration settings -------------------------- * :doc:`API configuration `: A complete reference of API configuration settings. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/source/configuration/policy.rst0000664000175100017510000000112515033036143022372 0ustar00mylesmyles================= Masakari Policies ================= .. warning:: JSON formatted policy file is deprecated since Masakari 11.0.0 (Wallaby). This `oslopolicy-convert-json-to-yaml`__ tool will migrate your existing JSON-formatted policy file to YAML in a backward-compatible way. .. 
__: https://docs.openstack.org/oslo.policy/latest/cli/oslopolicy-convert-json-to-yaml.html The following is an overview of all available policies in masakari. For a sample configuration file, refer to :doc:`sample_policy`. .. show-policy:: :config-file: etc/masakari/masakari-policy-generator.conf ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/source/configuration/recovery_config.rst0000664000175100017510000000054315033036143024261 0ustar00mylesmyles===================== Configuration Options ===================== The following is an overview of all available configuration options in Masakari. .. only:: html For a sample configuration file, refer to :doc:`recovery_workflow_sample_config`. .. show-options:: :config-file: etc/masakari/masakari-customized-recovery-flow-config-generator.conf ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/source/configuration/recovery_workflow_custom_task.rst0000664000175100017510000000471215033036143027304 0ustar00mylesmyles================================================ Guide for Custom Recovery Workflow Configuration ================================================ If operator wants customized recovery workflow, so here is guidelines mentioned for how to associate custom tasks from Third Party Library along with standard recovery workflows in Masakari.: #. First make sure required Third Party Library is installed on the Masakari engine node. Below is the sample custom task file. For example: .. code-block:: bash from oslo_log import log as logging from taskflow import task LOG = logging.getLogger(__name__) class Noop(task.Task): def __init__(self, novaclient): self.novaclient = novaclient super(Noop, self).__init__() def execute(self, **kwargs): LOG.info("Custom task executed successfully..!!") return #. 
Configure custom task in Third Party Library's setup.cfg as below: For example, Third Party Library's setup.cfg will have following entry points .. code-block:: bash masakari.task_flow.tasks = custom_pre_task = custom_main_task = custom_post_task = Note: Entry point in Third Party Library's setup.cfg should have same key as in Masakari setup.cfg for respective failure recovery. #. Configure custom task in Masakari's new conf file custom-recovery-methods.conf with same name which was given in the setup.cfg to locate class path. For example(custom task added in host auto failure config option): .. code-block:: bash host_auto_failure_recovery_tasks = { 'pre': ['disable_compute_service_task', 'custom_pre_task'], 'main': ['custom_main_task', 'prepare_HA_enabled_instances_task'], 'post': ['evacuate_instances_task', 'custom_post_task']} #. If there are any configuration parameters required for custom task, then add them into custom-recovery-methods.conf under the same group/section where they are registered in Third Party Library. All config parameters related to recovery method customization should be part of newly added conf file. Operator will be responsible to generate masakari.conf and related configuration files by themselves. #. Operator should ensure output of each task should be made available to the next tasks needing them. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/source/configuration/recovery_workflow_sample_config.rst0000664000175100017510000000100715033036143027550 0ustar00mylesmyles=========================================================== Masakari Customized Recovery Workflow Configuration Options =========================================================== masakari-custom-recovery-methods.conf.sample The following is a sample Masakari recovery workflow configuration for adaptation and use. The sample configuration can also be downloaded from :download:`here `. .. 
literalinclude:: /_static/masakari-custom-recovery-methods.conf.sample ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/source/configuration/sample_config.rst0000664000175100017510000000425115033036143023704 0ustar00mylesmyles============================== Masakari Configuration Options ============================== The following is a sample Masakari configuration for adaptation and use. It is auto-generated from Masakari when this documentation is built, so if you are having issues with an option, please compare your version of Masakari with the version of this documentation. The sample configuration can also be downloaded from :download:`here `. .. literalinclude:: /_static/masakari.conf.sample Minimal Configuration ===================== Edit the ``/etc/masakari/masakari.conf`` file and complete the following actions In the ``[DEFAULT]`` section, set following options: .. code-block:: bash auth_strategy = keystone masakari_topic = ha_engine os_privileged_user_tenant = service os_privileged_user_auth_url = http://controller/identity os_privileged_user_name = nova os_privileged_user_password = PRIVILEGED_USER_PASS Replace ``PRIVILEGED_USER_PASS`` with the password you chose for the privileged user in the Identity service. In the ``[database]`` section, configure database access: .. code-block:: bash connection = mysql+pymysql://root:MASAKARI_DBPASS@controller/masakari?charset=utf8 In the ``[keystone_authtoken]`` sections, configure Identity service access: .. code-block:: bash auth_url = http://controller/identity memcached_servers = controller:11211 signing_dir = /var/cache/masakari project_domain_name = Default user_domain_name = Default project_name = service username = masakari password = MASAKARI_PASS auth_type = password cafile = /opt/stack/data/ca-bundle.pem Replace ``MASAKARI_PASS`` with the password you chose for the ``masakari`` user in the Identity service. 
In the ``[coordination]`` section, set 'backend_url' if use coordination for Masakari-api service. .. note:: Additional packages may be required depending on the tooz backend used in the installation. For example, ``etcd3gw`` is required if the backend driver is configured to use ``etcd3+http://``. Supported drivers are listed at `Tooz drivers `_. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/source/configuration/sample_policy.rst0000664000175100017510000000137015033036143023735 0ustar00mylesmyles=========================== Sample Masakari Policy File =========================== .. warning:: JSON formatted policy file is deprecated since Masakari 11.0.0 (Wallaby). This `oslopolicy-convert-json-to-yaml`__ tool will migrate your existing JSON-formatted policy file to YAML in a backward-compatible way. .. __: https://docs.openstack.org/oslo.policy/latest/cli/oslopolicy-convert-json-to-yaml.html The following is a sample masakari policy file for adaptation and use. The sample policy can also be viewed in :download:`file form `. .. important:: The sample policy file is auto-generated from masakari when this documentation is built. .. literalinclude:: /_static/masakari.policy.yaml.sample ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1751923813.5247102 masakari-19.1.0.dev18/doc/source/contributor/0000775000175100017510000000000015033036146020050 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/source/contributor/code_structure.rst0000664000175100017510000000513615033036143023636 0ustar00mylesmyles.. Copyright 2020 Leafcloud B.V. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ======================= Masakari Code Structure ======================= Getting started with any codebase requires some getting used to the layout of the project, this guide is meant to make your journey navigating the code easier. All paths are relative to the repository root. Code layout =========== The Python source code for the project can be found in ``masakari``: - ``masakari/api`` contains the api service, - ``masakari/engine`` contains the engine service. The data model ============== The oslo objects ---------------- The base datamodel can be found in ``masakari/objects``. It uses ``oslo_versionedobjects``. These objects are used throughout the code, including RPC, REST API and database persistence. The oslo notifications ---------------------- The datamodel for oslo notifications (not to be confused with Masakari notifications which are one type of Masakari data objects) can be found in ``masakari/notifications``. The REST API ------------ Mappings of the models for the API are in ``masakari/api/openstack/ha/schemas``. The controllers are in ``masakari/api/openstack/ha``. The implementations of the actions are in ``masakari/ha/api.py``. The database (persistence) -------------------------- Some objects can be persisted (saved) to the database, currently only ``sqlalchemy`` is supported as the backend. The general interface is in ``masakari/db/api.py``. The sqlalchemy implementation is in ``masakari/db/sqlalchemy/api.py``. Database mappings are in ``masakari/db/sqlalchemy/models.py``. The entry points ================ The Masakari project has a variety of entry points. 
The entry points can be found in the ``entry_points`` section of ``setup.cfg``. The main entry points --------------------- The main entry points are for the engine and the api: - ``masakari.cmd.api:main``, - ``masakari.cmd.engine:main``. Another interesting one is the entry point for the management CLI, ``masakari.cmd.manage:main``. The tests ========= The tests are located in: ``masakari/tests``. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/source/contributor/contributing.rst0000664000175100017510000000347615033036143023320 0ustar00mylesmyles============================ So You Want to Contribute... ============================ For general information on contributing to OpenStack, please check out the `contributor guide `_ to get started. It covers all the basics that are common to all OpenStack projects: the accounts you need, the basics of interacting with our Gerrit review system, how we communicate as a community, etc. Below will cover the more project specific information you need to get started with masakari. Communication ~~~~~~~~~~~~~ * IRC channel #openstack-masakari at OFTC * Mailing list (prefix subjects with ``[masakari]`` for faster responses) http://lists.openstack.org/cgi-bin/mailman/listinfo/openstack-discuss Contacting the Core Team ~~~~~~~~~~~~~~~~~~~~~~~~ Please refer the `masakari Core Team `_ contacts. New Feature Planning ~~~~~~~~~~~~~~~~~~~~ masakari features are tracked on `Launchpad `_. Task Tracking ~~~~~~~~~~~~~ We track our tasks in `Launchpad `_. If you're looking for some smaller, easier work item to pick up and get started on, search for the 'low-hanging-fruit' tag. Reporting a Bug ~~~~~~~~~~~~~~~ You found an issue and want to make sure we are aware of it? You can do so on `Launchpad `_. 
Getting Your Patch Merged ~~~~~~~~~~~~~~~~~~~~~~~~~ All changes proposed to the masakari project require one or two +2 votes from masakari core reviewers before one of the core reviewers can approve patch by giving ``Workflow +1`` vote. Project Team Lead Duties ~~~~~~~~~~~~~~~~~~~~~~~~ All common PTL duties are enumerated in the `PTL guide `_. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/source/contributor/release_notes.rst0000664000175100017510000001215115033036143023427 0ustar00mylesmyles.. _release-notes: ============= Release notes ============= Introduction ~~~~~~~~~~~~ Masakari uses the following release notes sections: - ``features`` --- for new features or functionality; these should ideally refer to the blueprint being implemented; - ``fixes`` --- for fixes closing bugs; these must refer to the bug being closed; - ``upgrade`` --- for notes relevant when upgrading from the previous version; these should ideally be added only between major versions; required when the proposed change affects behaviour in a non-backwards-compatible way or generally changes something impactful; - ``deprecations`` --- to track deprecated features; relevant changes may consist of only the commit message and the release note; - ``prelude`` --- filled in by the PTL before each release or RC. Other release note types may be applied per common sense. Each change should include a release note unless being a ``TrivialFix`` change or affecting only docs or CI. Such changes should `not` include a release note to avoid confusion. Remember release notes are mostly for end users which, in case of Masakari, are OpenStack administrators/operators as well as cloud users. In case of doubt, the core team will let you know what is required. To add a release note, run the following command: .. code-block:: console tox -e venv -- reno new All release notes can be inspected by browsing ``releasenotes/notes`` directory. 
Further on this page we show reno templates, examples and how to make use of them. .. note:: The term `release note` is often abbreviated to `reno` as it is the name of the tool that is used to manage the release notes. To generate renos in HTML format in ``releasenotes/build``, run: .. code-block:: console tox -e releasenotes Note this requires the release note to be tracked by ``git`` so you have to at least add it to the ``git``'s staging area. The release notes are linted in the CI system. To lint locally, run: .. code-block:: console tox -e doc8 The above lints all of the documentation at once. Templates and examples ~~~~~~~~~~~~~~~~~~~~~~ All approved release notes end up being published on a dedicated site: https://docs.openstack.org/releasenotes/masakari/ When looking for examples, it is advised to consider browsing the page above for a similar type of change and then comparing with their source representation in ``releasenotes/notes``. The sections below give further guidelines. Please try to follow them but note they are not set in stone and sometimes a different wording might be more appropriate. In case of doubt, the core team will be happy to help. Features -------- Template ++++++++ .. path releasenotes/templates/feature.yml .. code-block:: yaml --- features: - | Implements [some feature]. [Can be described using multiple sentences if necessary.] [Limitations worth mentioning can be included as well.] `Blueprint [blueprint id] `__ .. note:: The blueprint can be mentioned even if the change implements it only partially. This can be emphasised by preceding the ``Blueprint`` word by ``Partial``. See the example below. Example +++++++ Implementing blueprint with id `proactive-failure-detectors`, we use ``reno`` to generate the scaffolded file: .. code-block:: console tox -e venv -- reno new --from-template releasenotes/templates/feature.yml blueprint-proactive-failure-detectors .. 
note:: Since we don't require blueprints for simple features, it is allowed to make up a blueprint-id-friendly string (like in the example here) ad-hoc for the proposed feature. Please then skip the ``blueprint-`` prefix to avoid confusion. And then fill it out with the following content: .. code-block:: yaml --- features: - | Implements support for a future generation of proactive failure detectors. However, please note we don't support alternative time streams yet. `Partial Blueprint proactive-failure-detectors `__ .. note:: The example above shows how to introduce a limitation. The limitation may be lifted in the same release cycle and it is OK to mention it nonetheless. Release notes can be edited later as long as they have not been shipped in an existing release or release candidate. Fixes ----- Template ++++++++ .. path releasenotes/templates/fix.yml .. code-block:: yaml --- fixes: - | Fixes [some bug]. [Can be described using multiple sentences if necessary.] [Possibly also giving the previous behaviour description.] `LP#[bug number] `__ Example +++++++ Fixing bug number `1234567890`, we use ``reno`` to generate the scaffolded file: .. code-block:: console tox -e venv -- reno new --from-template releasenotes/templates/fix.yml bug-1234567890 And then fill it out with the following content: .. code-block:: yaml --- fixes: - | Fixes everything. RLY `LP#1234567890 `__ ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/source/index.rst0000664000175100017510000000704715033036143017344 0ustar00mylesmyles.. Copyright 2017 NTT DATA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==================================== Welcome to Masakari's documentation! ==================================== Masakari is an OpenStack project designed to ensure high availability of instances and compute processes running on hosts. This documentation is intended to help explain the current scope of the Masakari project and the architectural decisions made to support this scope. The documentation will include the future architectural roadmap and the current development process and policies. Masakari API References ======================= The `Masakari API `_ is extensive. We provide a concept guide which gives some of the high level details, as well as a more detailed API reference. Operator Guide ============== Architecture Overview --------------------- * :doc:`Masakari architecture `: An overview of how all the components in masakari work together. Installation ------------ A detailed install guide for masakari. .. toctree:: :maxdepth: 2 install/index Reference Material ------------------ * :doc:`Configuration Guide `: Information on configuration files. * :doc:`Custom Recovery Workflow Configuration Guide ` * :doc:`CLI Commands for Masakari `: The complete command reference for Masakari. * :doc:`Versioned Notifications `: This provides the list of existing versioned notifications with sample payloads. This will help newcomers understand the basics of Masakari * `Nova Docs `_: A collection of guides for Nova. .. 
# NOTE(shilpasd): This is the section where we hide things that we don't # actually want in the table of contents but sphinx build would fail if # they aren't in the toctree somewhere. .. toctree:: :hidden: cli/index configuration/api-paste.ini.rst configuration/config.rst configuration/index.rst configuration/policy.rst configuration/recovery_config.rst configuration/recovery_workflow_custom_task.rst configuration/sample_policy.rst user/architecture.rst user/notifications.rst .. only:: html .. toctree:: :hidden: configuration/recovery_workflow_sample_config.rst configuration/sample_config.rst .. only:: html Contributor Guide ================= .. toctree:: :maxdepth: 2 user/how_to_get_involved user/process install/development.environment contributor/code_structure contributor/release_notes For Contributors ================ * If you are a new contributor to Masakari please refer: :doc:`contributor/contributing` .. toctree:: :hidden: contributor/contributing Search ====== * :ref:`search`: Search the contents of this document. * `OpenStack wide search `_: Search the wider set of OpenStack documentation, including forums. ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1751923813.5247102 masakari-19.1.0.dev18/doc/source/install/0000775000175100017510000000000015033036146017144 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/source/install/development.environment.rst0000664000175100017510000000554315033036143024567 0ustar00mylesmyles.. Copyright 2017 NTT DATA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ====================== Development Quickstart ====================== This page describes how to setup and use a working Python development environment that can be used in developing masakari on Ubuntu. These instructions assume you're already familiar with git. Following these instructions will allow you to build the documentation and run the masakari unit tests. .. note:: For how to contribute to Masakari, refer: http://docs.openstack.org/infra/manual/developers.html Masakari uses the Gerrit code review system, refer: http://docs.openstack.org/infra/manual/developers.html#development-workflow Setup ===== There are two ways to create a development environment: using DevStack, or explicitly installing and cloning just what you need. Using DevStack -------------- To enable Masakari in DevStack, perform the following steps: Download DevStack ~~~~~~~~~~~~~~~~~ .. sourcecode:: bash export DEVSTACK_DIR=~/devstack git clone https://opendev.org/openstack/devstack.git $DEVSTACK_DIR Enable the Masakari plugin ~~~~~~~~~~~~~~~~~~~~~~~~~~ Enable the plugin by adding the following section to ``$DEVSTACK_DIR/local.conf`` .. sourcecode:: bash [[local|localrc]] enable_plugin masakari https://opendev.org/openstack/masakari Optionally, a git refspec (branch or tag or commit) may be provided as follows: .. sourcecode:: bash [[local|localrc]] enable_plugin masakari https://opendev.org/openstack/masakari Run the DevStack utility ~~~~~~~~~~~~~~~~~~~~~~~~ .. sourcecode:: bash cd $DEVSTACK_DIR ./stack.sh Explicit Install/Clone ---------------------- DevStack installs a complete OpenStack environment. 
Alternatively, to clone and install Masakari explicitly refer: :doc:`install_and_configure_ubuntu` Building the Documentation ========================== For a full documentation build, issue the following command from the masakari directory .. code-block:: bash tox -e docs That will create a Python virtual environment, install the needed Python prerequisites in that environment, and build all the documentation in that environment. Running unit tests ================== See `Running Python Unit Tests `_ ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/source/install/index.rst0000664000175100017510000000016415033036143021003 0ustar00mylesmyles================= Masakari services ================= .. toctree:: overview install_and_configure verify ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/source/install/install_and_configure.rst0000664000175100017510000000203715033036143024226 0ustar00mylesmyles.. Copyright 2017 NTT DATA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ===================== Install and configure ===================== This section describes how to install and configure Masakari services on the compute node. This section assumes that you already have a working OpenStack environment with the following components installed: Nova, Glance, Cinder, Neutron and Identity. The installation and configuration vary by distribution. .. 
toctree:: :maxdepth: 1 install_and_configure_ubuntu ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/source/install/install_and_configure_ubuntu.rst0000664000175100017510000002320515033036143025630 0ustar00mylesmyles.. _install-ubuntu: Install and configure for Ubuntu ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ This section describes how to install and configure Masakari for Ubuntu 18.04 (bionic). Prerequisites ------------- Before you install and configure the masakari service, you must create databases, service credentials, and API endpoints. #. To create the masakari database, follow these steps: * Use the database access client to connect to the database server as the ``root`` user: .. code-block:: console # mysql * Create the ``masakari`` database: .. code-block:: console mysql> CREATE DATABASE masakari CHARACTER SET utf8; * Grant access privileges to the databases: .. code-block:: console mysql> GRANT ALL PRIVILEGES ON masakari.* TO 'username'@'localhost' \ IDENTIFIED BY 'MASAKARI_DBPASS'; mysql> GRANT ALL PRIVILEGES ON masakari.* TO 'username'@'%' \ IDENTIFIED BY 'MASAKARI_DBPASS'; Replace ``MASAKARI_DBPASS`` with a suitable password. * Exit the database access client. #. Source the ``admin`` credentials to gain access to admin-only CLI commands: .. code-block:: console $ . admin-openrc #. Create the Masakari service credentials: * Create the ``masakari`` user with password as ``masakari``: .. code-block:: console $ openstack user create --password-prompt masakari User Password: Repeat User Password: +---------------------+----------------------------------+ | Field | Value | +---------------------+----------------------------------+ | domain_id | default | | enabled | True | | id | 8a7dbf5279404537b1c7b86c033620fe | | name | masakari | | options | {} | | password_expires_at | None | +---------------------+----------------------------------+ * Add the ``admin`` role to the ``masakari`` user: .. 
code-block:: console $ openstack role add --project service --user masakari admin * Create the ``masakari`` service entity: .. code-block:: console $ openstack service create --name masakari \ --description "masakari high availability" instance-ha +-------------+----------------------------------+ | Field | Value | +-------------+----------------------------------+ | description | masakari high availability | | enabled | True | | id | 060d59eac51b4594815603d75a00aba2 | | name | masakari | | type | instance-ha | +-------------+----------------------------------+ #. Create the Masakari API service endpoints: .. code-block:: console $ openstack endpoint create --region RegionOne \ masakari public http:///instance-ha/v1/$\(tenant_id\)s +--------------+-------------------------------------------------------+ | Field | Value | +--------------+-------------------------------------------------------+ | enabled | True | | id | 38f7af91666a47cfb97b4dc790b94424 | | interface | public | | region | RegionOne | | region_id | RegionOne | | service_id | 060d59eac51b4594815603d75a00aba2 | | service_name | masakari | | service_type | instance-ha | | url | http:///instance-ha/v1/$(tenant_id)s | +--------------+-------------------------------------------------------+ $ openstack endpoint create --region RegionOne \ masakari internal http:///instance-ha/v1/$\(tenant_id\)s +--------------+-------------------------------------------------------+ | Field | Value | +--------------+-------------------------------------------------------+ | enabled | True | | id | 38f7af91666a47cfb97b4dc790b94424 | | interface | internal | | region | RegionOne | | region_id | RegionOne | | service_id | 060d59eac51b4594815603d75a00aba2 | | service_name | masakari | | service_type | instance-ha | | url | http:///instance-ha/v1/$(tenant_id)s | +--------------+-------------------------------------------------------+ $ openstack endpoint create --region RegionOne \ masakari admin 
http:///instance-ha/v1/$\(tenant_id\)s +--------------+-------------------------------------------------------+ | Field | Value | +--------------+-------------------------------------------------------+ | enabled | True | | id | 38f7af91666a47cfb97b4dc790b94424 | | interface | admin | | region | RegionOne | | region_id | RegionOne | | service_id | 060d59eac51b4594815603d75a00aba2 | | service_name | masakari | | service_type | instance-ha | | url | http:///instance-ha/v1/$(tenant_id)s | +--------------+-------------------------------------------------------+ Install and configure Masakari ------------------------------ .. note:: * You must install Masakari on the Controller Nodes only. #. Clone masakari using: .. code-block:: console # git clone https://opendev.org/openstack/masakari.git #. Prepare the masakari configuration files: #. Generate via tox: Go to ``/opt/stack/masakari`` and execute the command below. This will generate ``masakari.conf.sample``, a sample configuration file, at ``/opt/stack/masakari/etc/masakari/``: .. code-block:: console # tox -egenconfig #. Download from: # :download:`masakari.conf.sample ` #. Rename ``masakari.conf.sample`` file to ``masakari.conf``, and edit sections as shown below: .. 
code-block:: ini [DEFAULT] transport_url = rabbit://stackrabbit:admin@:5672/ graceful_shutdown_timeout = 5 os_privileged_user_tenant = service os_privileged_user_password = admin os_privileged_user_auth_url = http:///identity os_privileged_user_name = nova logging_exception_prefix = %(color)s%(asctime)s.%(msecs)03d TRACE %(name)s [01;35m%(instance)s[00m logging_debug_format_suffix = [00;33mfrom (pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d[00m logging_default_format_string = %(asctime)s.%(msecs)03d %(color)s%(levelname)s %(name)s [[00;36m-%(color)s] [01;35m%(instance)s%(color)s%(message)s[00m logging_context_format_string = %(asctime)s.%(msecs)03d %(color)s%(levelname)s %(name)s [[01;36m%(request_id)s [00;36m%(project_name)s %(user_name)s%(color)s] [01;35m%(instance)s%(color)s%(message)s[00m use_syslog = False debug = True masakari_api_workers = 2 [database] connection = mysql+pymysql://root:admin@1/masakari?charset=utf8 [keystone_authtoken] memcached_servers = localhost:11211 cafile = /opt/stack/data/ca-bundle.pem project_domain_name = Default project_name = service user_domain_name = Default password = username = masakari auth_url = http:///identity auth_type = password [taskflow] connection = mysql+pymysql://root:admin@/masakari?charset=utf8 .. note:: Replace ``CONTROLLER_IP`` with the IP address of controller node. Replace ``MASAKARI_PASS`` with the password you chose for the ``masakari`` user in the Identity service. #. Create ``masakari`` directory in /etc/: Copy ``masakari.conf`` file to ``/etc/masakari/`` .. code-block:: console # cp -p etc/masakari/masakari.conf.sample /etc/masakari/masakari.conf #. To install masakari run setup.py from masakari: .. code-block:: console # cd masakari # sudo python -m pip install -r requirements.txt # sudo python setup.py install #. Run below db command to sync database: .. code-block:: console # masakari-manage db sync Finalize installation --------------------- * Start masakari services: .. 
code-block:: console # masakari-api # masakari-engine ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/source/install/overview.rst0000664000175100017510000000311615033036143021542 0ustar00mylesmyles========================= Masakari service overview ========================= Masakari provides Virtual Machines High Availability(VMHA), and rescues KVM-based Virtual Machines(VM) from the failure events described below: * ``VM process down`` - restart vm (use nova stop API, and nova start API). Libvirt events will also be emitted by other failures. * ``Provisioning process down`` - restarts process, changes nova-compute service status to maintenance mode (use nova service-disable). * ``nova-compute host failure`` - evacuate all the VMs from failure host according to the following recovery methods (use nova evacuate API). * ``auto`` - evacuate all the VMs with no destination node for nova scheduler. * ``reserved_host`` - evacuate all the VMs with reserved hosts as the destination nodes for nova scheduler. * ``auto_priority`` - evacuate all the VMs by using ``auto`` recovery method firstly. If failed, then using ``reserved_host`` recovery method. * ``rh_priority`` - evacuate all the VMs by using ``reserved_host`` recovery method firstly. If failed, then using ``auto`` recovery method. The below services enable deployers to integrate with Masakari directly or through custom plug-ins. The Masakari service consists of the following components: ``masakari-api`` An OpenStack-native REST API that processes API requests by sending them to the ``masakari-engine`` over `Remote Procedure Call (RPC)`. ``masakari-engine`` Processes the notifications received from ``masakari-api`` by executing the recovery workflow in an asynchronous way. 
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/source/install/verify.rst0000664000175100017510000001123615033036143021202 0ustar00mylesmylesVerify operation ~~~~~~~~~~~~~~~~ Verify Masakari installation. #. Source the ``admin`` credentials to gain access to admin-only CLI commands: .. code-block:: console $ . admin-openrc #. List API endpoints in the Identity service to verify connectivity with the Identity service: .. note:: Below endpoints list may differ depending on the installation of OpenStack components. .. code-block:: console $ openstack endpoint list +-------------+----------------+--------------------------------------------------------+ | Name | Type | Endpoints | +-------------+----------------+--------------------------------------------------------+ | nova_legacy | compute_legacy | RegionOne | | | | public: http://controller/compute/v2/ | | | | | | nova | compute | RegionOne | | | | public: http://controller/compute/v2.1 | | | | | | cinder | block-storage | RegionOne | | | | public: http://controller/volume/v3/ | | | | | | glance | image | RegionOne | | | | public: http://controller/image | | | | | | cinderv3 | volumev3 | RegionOne | | | | public: http://controller/volume/v3/ | | | | | | masakari | instance-ha | RegionOne | | | | internal: http://controller/instance-ha/v1/ | | | | RegionOne | | | | admin: http://controller/instance-ha/v1/ | | | | RegionOne | | | | public: http://controller/instance-ha/v1/ | | | | | | keystone | identity | RegionOne | | | | public: http://controller/identity | | | | RegionOne | | | | admin: http://controller/identity | | | | | | cinderv2 | volumev2 | RegionOne | | | | public: http://controller/volume/v2/ | | | | | | placement | placement | RegionOne | | | | public: http://controller/placement | | | | | | neutron | network | RegionOne | | | | public: http://controller:9696/ | | | | | 
+-------------+----------------+--------------------------------------------------------+ #. Run ``segment list`` command to verify masakari-api is running properly. This will return empty segment list as you haven't yet configured ``Failover segments``. .. code-block:: console $ openstack segment list .. note:: Since ``Failover segments`` are not configured, there is no way to verify masakari-engine is running properly as the notification cannot be sent from masakari-api to masakari-engine. ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1751923813.5247102 masakari-19.1.0.dev18/doc/source/user/0000775000175100017510000000000015033036146016454 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/source/user/architecture.rst0000664000175100017510000000333415033036143021670 0ustar00mylesmyles.. Copyright 2017 NTT DATA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. Masakari System Architecture ============================ Masakari comprises of two services api and engine, each performing different functions. The user-facing interface is a REST API, while internally Masakari communicates via an RPC message passing mechanism. The API servers process REST requests, which typically involve database reads/writes, sending RPC messages to other Masakari engine, and generating responses to the REST calls. 
RPC messaging is done via the **oslo.messaging** library, an abstraction on top of message queues. The Masakari engine will run on the same host where the Masakari api is running, and has a `manager` that is listening for `RPC` messages. The manager too has periodic tasks. Components ---------- Below you will find a helpful explanation of the key components of a typical Masakari deployment. .. image:: /_static/architecture.png :width: 100% * DB: sql database for data storage. * API: component that receives HTTP requests, converts commands and communicates with masakari engine via the **oslo.messaging** queue. * Engine: Executes recovery workflow and communicates with nova via HTTP. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/source/user/how_to_get_involved.rst0000664000175100017510000003025715033036143023256 0ustar00mylesmyles.. Copyright 2017 NTT DATA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. .. _getting_involved: ======================================== How to get (more) involved with Masakari ======================================== So you want to get more involved with Masakari? Or you are new to Masakari and wondering where to start? We are working on building easy ways for you to get help and ideas on how to learn more about Masakari and how the Masakari community works. How do I get started? 
===================== There are quite a few global docs on this: - http://www.openstack.org/assets/welcome-guide/OpenStackWelcomeGuide.pdf - https://wiki.openstack.org/wiki/How_To_Contribute - http://www.openstack.org/community/ There is more general info, non Masakari specific info here: - https://wiki.openstack.org/wiki/Mentors - https://wiki.openstack.org/wiki/OpenStack_Upstream_Training What should I work on? ~~~~~~~~~~~~~~~~~~~~~~ So you are starting out your Masakari journey, where is a good place to start? If you'd like to learn how Masakari works before changing anything (good idea!), we recommend looking for reviews with -1s and -2s and seeing why they got down voted. Once you have some understanding, start reviewing patches. It's OK to ask people to explain things you don't understand. It's also OK to see some potential problems but put a +0. Once you're ready to write code, take a look at some of the work already marked as low-hanging fruit: * https://bugs.launchpad.net/masakari/+bugs?field.tag=low-hanging-fruit How do I get my feature in? ~~~~~~~~~~~~~~~~~~~~~~~~~~~ The best way of getting your feature in is... well it depends. First concentrate on solving your problem and/or use case, don't fixate on getting the code you have working merged. It's likely things will need significant re-work after you discuss how your needs match up with all the existing ways Masakari is currently being used. The good news, is this process should leave you with a feature that's more flexible and doesn't lock you into your current way of thinking. A key part of getting code merged, is helping with reviewing other people's code. Great reviews of others code will help free up more core reviewer time to look at your own patches. In addition, you will understand how the review is thinking when they review your code. Also, work out if any ongoing efforts are blocking your feature and helping out speeding those up. The spec review process should help with this effort. 
For more details on our process, please see: :ref:`process`. What is expected of a good contributor? ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ TODO - need more info on this Top Tips for working with the Masakari community ================================================ Here are some top tips around engaging with the Masakari community: - IRC - we talk a lot in #openstack-masakari - do ask us questions in there, and we will try to help you - not sure about asking questions? feel free to listen in around other people's questions - we recommend you setup an IRC bouncer: https://wiki.openstack.org/wiki/IRC - Email - Use the [masakari] tag in the mailing lists - Filtering on [masakari] and [all] can help tame the list - Be Open - i.e. don't review your teams code in private, do it publicly in gerrit - i.e. be ready to talk about openly about problems you are having, not "theoretical" issues - that way you can start to gain the trust of the wider community - Got a problem? Please ask! - Please raise any problems and ask questions early - we want to help you before you are frustrated or annoyed - unsure who to ask? Just ask in IRC. - Talk about problems first, then solutions - Don't think about "merging your patch", instead think about "solving your problem" - conversations are more productive that way - It's not the decision that's important, it's the reason behind it that's important - Don't like the way the community is going? - Please ask why we were going that way, and please engage with the debate - If you don't, we are unable to learn from what you have to offer - No one will decide, this is stuck, who can help me? - it's rare, but it happens - ...but if you don't ask, it's hard for them to help you Process ======= It can feel like you are faced with a wall of process. We are a big community, to make sure the right communication happens, we do use a minimal amount of process. 
If you find something that doesn't make sense, please: - ask questions to find out \*why\* it happens - if you know of a better way to do it, please speak up - one "better way" might be to remove the process if it no longer helps To learn more about Masakari's process, please read :ref:`process`. Why bother with any process? ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Why is it worth creating a bug or blueprint to track your code review? This may seem like silly process, but there is usually a good reason behind it. We have lots of code to review, and we have tools to try and get to really important code reviews first. If yours is really important, but not picked up by our tools, it's possible you just get lost in the bottom of a big queue. If you have a bug fix, you have done loads of work to identify the issue, and test out your fix, and submit it. By adding a bug report, you are making it easier for other folks who hit the same problem to find your work, possibly saving them the hours of pain you went through. With any luck that gives all those people the time to fix different bugs, all that might have affected you, if you had not given them the time go fix it. It's similar with blueprints. You have worked out how to scratch your itch, lets tell others about that great new feature you have added, so they can use that. Also, it stops someone with a similar idea going through all the pain of creating a feature only to find you already have that feature ready and up for review, or merged into the latest release. Hopefully this gives you an idea why we have applied a small layer of process to what we are doing. Having said all this, we need to unlearn old habits to move forward, there may be better ways to do things, and we are open to trying them. Please help be part of the solution. .. _why_plus1: Why do code reviews if I am not in masakari-core? ================================================= Code reviews are the life blood of the developer community. 
There is a good discussion on how you do good reviews, and how anyone can be a reviewer: http://docs.openstack.org/infra/manual/developers.html#peer-review In the draft process guide, I discuss how doing reviews can help get your code merged faster: :ref:`process`. Let's look at some of the top reasons why participating with code reviews really helps you: - Doing more reviews, and seeing what other reviewers notice, will help you better understand what is expected of code that gets merged into master - Having more non-core people do great reviews, leaves less review work for the core reviewers to do, so we are able get more code merged - Empathy is one of the keys to a happy community. If you are used to doing code reviews, you will better understand the comments you get when people review your code. As you do more code reviews, and see what others notice, you will get a better idea of what people are looking for when then apply a +2 to your code. What are the most useful types of code review comments? Well here are a few to the top ones: - Fundamental flaws are the biggest thing to spot. Does the patch break a whole set of existing users, or an existing feature? - Consistency of behavior is really important. Does this bit of code do things differently to where similar things happen elsewhere in Masakari? - Is the code easy to maintain, well tested and easy to read? Code is read order of magnitude times more than it is written, so optimize for the reader of the code, not the writer. Let's look at some problems people hit when starting out doing code reviews: - My +1 doesn't mean anything, why should I bother? - So your +1 really does help. Some really useful -1 votes that lead to a +1 vote helps get code into a position - When to use -1 vs 0 vs +1 - Please see the guidelines here: http://docs.openstack.org/infra/manual/developers.html#peer-review - I have already reviewed this code internally, no point in adding a +1 externally? 
- Please talk to your company about doing all code reviews in the public, that is a much better way to get involved. Showing how the code has evolved upstream, is much better than trying to 'perfect' code internally, before uploading for public review. You can use Draft mode, and mark things as WIP if you prefer, but please do the reviews upstream. - Where do I start? What should I review? - There are various tools, but a good place to start is: https://etherpad.openstack.org/p/masakari-pike-workitems - Depending on the time in the cycle, it's worth looking at NeedsCodeReview blueprints: https://blueprints.launchpad.net/masakari/ - Maybe take a look at things you want to see merged, bug fixes and features, or little code fixes - Look for things that have been waiting a long time for a review: - If you get through the above lists, try other tools, such as: http://status.openstack.org/reviews How to do great code reviews? ============================= http://docs.openstack.org/infra/manual/developers.html#peer-review For more tips, please see: `Why do code reviews if I am not in masakari-core?`_ How do I become masakari-core? ============================== You don't have to be masakari-core to be a valued member of the Masakari community. There are many, many ways you can help. Every quality review that helps someone get their patch closer to being ready to merge helps everyone get their code merged faster. The first step to becoming masakari-core is learning how to be an active member of the Masakari community, including learning how to do great code reviews. If you feel like you have the time to commit to all the masakari-core membership expectations, reach out to the Masakari PTL who will be able to find you an existing member of masakari-core to help mentor you. 
If all goes well, and you seem like a good candidate, your mentor will contact the rest of the masakari-core team to ask them to start looking at your reviews, so they are able to vote for you, if you get nominated for join masakari-core. We encourage all mentoring, where possible, to occur on #openstack-masakari so everyone can learn and benefit from your discussions. The above mentoring is available to everyone who wants to learn how to better code reviews, even if you don't ever want to commit to becoming masakari-core. If you already have a mentor, that's great, the process is only there for folks who are still trying to find a mentor. Being admitted to the mentoring program no way guarantees you will become a member of masakari-core eventually, it's here to help you improve, and help you have the sort of involvement and conversations that can lead to becoming a member of masakari-core. .. note:: You can try using ``masakari-ptl`` and/or ``masakari-core`` in your IRC message to get a response from the desired people. .. note:: For basic information on Masakari's governance, including the current PTL (Project Team Lead), please visit `Masakari's governance page `__. To see the current list of Masakari core reviewers (aka cores), see the `masakari-core group on Gerrit `__. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/source/user/notifications.rst0000664000175100017510000001221115033036143022051 0ustar00mylesmyles.. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the specific language governing permissions and limitations under the License. Notifications in Masakari ========================== Similar to other OpenStack services Masakari emits notifications to the message bus with the Notifier class provided by :oslo.messaging-doc:`oslo.messaging `. From the notification consumer point of view a notification consists of two parts: an envelope with a fixed structure defined by oslo.messaging and a payload defined by the service emitting the notification. The envelope format is the following:: { "priority": , "event_type": , "timestamp": , "publisher_id": , "message_id": , "payload": } oslo.messaging provides below choices of notification drivers: =============== ========================================================================== Driver Description =============== ========================================================================== messaging Send notifications using the 1.0 message format messagingv2 Send notifications using the 2.0 message format (with a message envelope) routing Configurable routing notifier (by priority or event_type) log Publish notifications via Python logging infrastructure test Store notifications in memory for test verification noop Disable sending notifications entirely =============== ========================================================================== So notifications can be completely disabled by setting the following in Masakari configuration file: .. code-block:: ini [oslo_messaging_notifications] driver = noop Masakari supports only Versioned notifications. Versioned notifications ----------------------- Masakari code uses the masakari.rpc.get_notifier call to get a configured oslo.messaging Notifier object and it uses the oslo provided functions on the Notifier object to emit notifications. 
The configuration of the returned Notifier object depends on the parameters of the get_notifier call and the value of the oslo.messaging configuration options ``driver`` and ``topics``. The versioned notification payload is not a free form dictionary but a serialized :oslo.versionedobjects-doc:`oslo.versionedobjects <>`. .. _service.update: For example the wire format of the ``segment.update`` notification looks like the following:: { "event_type": "api.update.segments.start", "timestamp": "2018-11-27 14:32:20.396940", "payload": { "masakari_object.name": "SegmentApiPayload", "masakari_object.data": { "description": null, "fault": null, "recovery_method": "auto", "name": "test", "service_type": "compute", "id": 877, "uuid": "89597691-bebd-4860-a93e-1b6e9de34b9e" }, " "masakari_object.version": "1.0", "masakari_object.namespace": "masakari" }, "priority": "INFO", "publisher_id": "masakari-api:test-virtualbox", "message_id": "e6322900-025d-4dd6-a3a1-3e0e1e9badeb" } The serialized oslo versionedobject as a payload provides a version number to the consumer so the consumer can detect if the structure of the payload is changed. Masakari provides the following contract regarding the versioned notification payload: * the payload version defined by the ``masakari_object.version`` field of the payload will be increased only if the syntax or the semantics of the ``masakari_object.data`` field of the payload is changed. * a minor version bump indicates a backward compatible change which means that only new fields are added to the payload so a well written consumer can still consume the new payload without any change. * a major version bump indicates a backward incompatible change of the payload which can mean removed fields, type change, etc in the payload. * there is an additional field ``masakari_object.name`` for every payload besides ``masakari_object.data`` and ``masakari_object.version``. 
This field contains the name of the Masakari internal representation of the payload type. Client code should not depend on this name. Existing versioned notifications ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ * This provides the list of existing versioned notifications with sample payloads. .. versioned_notifications:: ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/doc/source/user/process.rst0000664000175100017510000001646315033036143020673 0ustar00mylesmyles.. Copyright 2017 NTT DATA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. .. _process: ===================== Masakari team process ===================== Masakari is always evolving its processes to ensure productive communication between all members of our community easily. OpenStack Wide Patterns ======================= Masakari follows most of the generally adopted norms for OpenStack projects. You can get more details here: * https://docs.openstack.org/infra/manual/developers.html * https://docs.openstack.org/project-team-guide/ If you are new to Masakari, please read this first: :ref:`getting_involved`. How do I get my code merged? ============================ OK, so you are new to Masakari, and you have been given a feature to implement. How do I make that happen? You can get most of your questions answered here: - https://docs.openstack.org/infra/manual/developers.html But let's put a Masakari specific twist on things... Overview ~~~~~~~~ .. 
image:: /_static/Masakari_spec_process.svg :alt: Flow chart showing the Masakari bug/feature process Where do you track bugs? ~~~~~~~~~~~~~~~~~~~~~~~~ We track bugs here: - https://bugs.launchpad.net/masakari If you fix an issue, please raise a bug so others who spot that issue can find the fix you kindly created for them. Also before submitting your patch it's worth checking to see if someone has already fixed it for you (Launchpad helps you with that, at little, when you create the bug report). When do I need a blueprint vs. a spec? ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ To understand this question, we need to understand why blueprints and specs are useful. But here is the rough idea: - if it needs a spec, it will need a blueprint. - if it's an API change, it needs a spec. - if it's a single small patch that touches a small amount of code, with limited deployer and doc impact, it probably doesn't need a spec. If you are unsure, please ask the PTL (masakari-ptl) or one of the other masakari-core on IRC. How do I get my blueprint approved? ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ So you need your blueprint approved? Here is how: - if you don't need a spec, please add a link to your blueprint to the agenda for the next masakari meeting: https://wiki.openstack.org/wiki/Meetings/Masakari - be sure your blueprint description has enough context for the review in that meeting. - if you need a spec, then please submit a masakari-spec for review. Got any more questions? Contact the PTL (masakari-ptl) or one of the other masakari-core who are awake at the same time as you. IRC is best as you will often get an immediate response. If they are too busy, send them an email. How do I get a procedural -2 removed from my patch? ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ When feature freeze hits, any patches for blueprints that are still in review get a procedural -2 to stop them merging. In Masakari a blueprint is only approved for a single release. 
To have the -2 removed, you need to get the blueprint approved for the current release (see `How do I get my blueprint approved?`_). My code review seems stuck, what can I do? ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ First and foremost - address any -1s and -2s! A few tips: - Be precise. Ensure you're not talking at cross purposes. - Try to understand where the reviewer is coming from. They may have a very different perspective and/or use-case to you. - If you don't understand the problem, ask them to explain - this is common and helpful behavior. - Be positive. Everyone's patches have issues, including core reviewers. No-one cares once the issues are fixed. - Try not to flip-flop. When two reviewers are pulling you in different directions, stop pushing code and negotiate the best way forward. - If the reviewer does not respond to replies left on the patchset, reach out to them on IRC or email. If they still don't respond, you can try to ask their colleagues if they're on holiday (or simply wait). Finally, you can ask for mediation in the Masakari meeting by adding it to the agenda (https://wiki.openstack.org/wiki/Meetings/Masakari). This is also what you should do if you are unable to negotiate a resolution to an issue. Eventually you should get some +1s from people working through the review queue. Expect to get -1s as well. You can ask for reviews within your company, 1-2 are useful (not more), especially if those reviewers are known to give good reviews. You can spend some time while you wait reviewing other people's code - they may reciprocate and you may learn something (:ref:`Why do code reviews when I'm not core? `). If you've waited an appropriate amount of time and you haven't had any +1s, you can ask on IRC for reviews. Please don't ask for core review straight away, especially not directly (IRC or email). Core reviewer time is very valuable and gaining some +1s is a good way to show your patch meets basic quality standards. 
Once you have a few +1s, be patient. Remember the average wait times. You can ask for reviews each week in IRC, it helps to ask when cores are awake. Bugs ---- It helps to apply correct tracking information. - Put "Closes-Bug", "Partial-Bug" or "Related-Bug" in the commit message tags as necessary. - If you have to raise a bug in Launchpad first, do it - this helps someone else find your fix. - Make sure the bug has the correct priority and tag set. Features -------- Again, it helps to apply correct tracking information. For blueprint-only features: - Put your blueprint in the commit message, EG "blueprint simple-feature". - Mark the blueprint as NeedsCodeReview if you are finished. - Maintain the whiteboard on the blueprint so it's easy to understand which patches need reviews. - Use a single topic for all related patches. All patches for one blueprint should share a topic. For blueprint and spec features, do everything for blueprint-only features and also: - If it's a project or subteam priority, add it to: https://etherpad.openstack.org/p/masakari-pike-workitems - Ensure your spec is approved for the current release cycle. If it's not a priority, your blueprint/spec has been approved for the cycle and you have been patient, you can raise it during the Masakari meeting. The outcome may be that your spec gets unapproved for the cycle, so that priority items can take focus. If this happens to you, sorry - it should not have been approved in the first place, Masakari team bit off more than they could chew, it is their mistake not yours. You can re-propose it for the next cycle. If it's not a priority and your spec has not been approved, your code will not merge this cycle. Please re-propose your spec for the next cycle. 
Release notes ------------- Release notes are covered on their own page: :doc:`Release notes ` ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1751923813.5187101 masakari-19.1.0.dev18/etc/0000775000175100017510000000000015033036146014204 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1751923813.5247102 masakari-19.1.0.dev18/etc/masakari/0000775000175100017510000000000015033036146015774 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/etc/masakari/README-masakari.conf.txt0000664000175100017510000000021015033036143022172 0ustar00mylesmylesTo generate the sample masakari.conf file, run the following command from the top level of the masakari directory: tox -egenconfig ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/etc/masakari/api-paste.ini0000664000175100017510000000261615033036143020362 0ustar00mylesmyles[composite:masakari_api] use = call:masakari.api.urlmap:urlmap_factory /: apiversions /v1: masakari_api_v1 [composite:masakari_api_v1] use = call:masakari.api.auth:pipeline_factory_v1 keystone = cors http_proxy_to_wsgi request_id faultwrap sizelimit authtoken keystonecontext osapi_masakari_app_v1 noauth2 = cors http_proxy_to_wsgi request_id faultwrap sizelimit noauth2 osapi_masakari_app_v1 # filters [filter:cors] paste.filter_factory = oslo_middleware.cors:filter_factory oslo_config_project = masakari [filter:http_proxy_to_wsgi] paste.filter_factory = oslo_middleware.http_proxy_to_wsgi:HTTPProxyToWSGI.factory [filter:request_id] paste.filter_factory = oslo_middleware:RequestId.factory [filter:faultwrap] paste.filter_factory = masakari.api.openstack:FaultWrapper.factory [filter:sizelimit] paste.filter_factory = oslo_middleware:RequestBodySizeLimiter.factory [filter:authtoken] paste.filter_factory = 
keystonemiddleware.auth_token:filter_factory [filter:keystonecontext] paste.filter_factory = masakari.api.auth:MasakariKeystoneContext.factory [filter:noauth2] paste.filter_factory = masakari.api.auth:NoAuthMiddleware.factory # apps [app:osapi_masakari_app_v1] paste.app_factory = masakari.api.openstack.ha:APIRouterV1.factory [pipeline:apiversions] pipeline = faultwrap http_proxy_to_wsgi apiversionsapp [app:apiversionsapp] paste.app_factory = masakari.api.openstack.ha.versions:Versions.factory ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/etc/masakari/masakari-config-generator.conf0000664000175100017510000000060715033036143023662 0ustar00mylesmyles[DEFAULT] output_file = etc/masakari/masakari.conf.sample wrap_width = 80 namespace = keystonemiddleware.auth_token namespace = masakari.conf namespace = oslo.db namespace = oslo.db.concurrency namespace = oslo.log namespace = oslo.messaging namespace = oslo.middleware namespace = oslo.policy namespace = oslo.service.service namespace = oslo.service.wsgi namespace = oslo.versionedobjects ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/etc/masakari/masakari-custom-recovery-methods.conf0000664000175100017510000000677715033036143025256 0ustar00mylesmyles[DEFAULT] [taskflow_driver_recovery_flows] # # From customized_recovery_flow_opts # # # This option allows operator to customize tasks to be executed for host failure # auto recovery workflow. # # Provide list of strings reflecting to the task classes that should be included # to the host failure recovery workflow. The full classname path of all task # classes should be defined in the 'masakari.task_flow.tasks' of setup.cfg and # these classes may be implemented by OpenStack Masaskari project team, deployer # or third party. # # By default below three tasks will be part of this config option:- # 1. disable_compute_service_task # 2. 
prepare_HA_enabled_instances_task # 3. evacuate_instances_task # # The allowed values for this option is comma separated dictionary of object # names in between ``{`` and ``}``. (dict value) host_auto_failure_recovery_tasks = main:['prepare_HA_enabled_instances_task'],post:['evacuate_instances_task'],pre:['disable_compute_service_task'] # # This option allows operator to customize tasks to be executed for host failure # reserved_host recovery workflow. # # Provide list of strings reflecting to the task classes that should be included # to the host failure recovery workflow. The full classname path of all task # classes should be defined in the 'masakari.task_flow.tasks' of setup.cfg and # these classes may be implemented by OpenStack Masaskari project team, deployer # or third party. # # By default below three tasks will be part of this config option:- # 1. disable_compute_service_task # 2. prepare_HA_enabled_instances_task # 3. evacuate_instances_task # # The allowed values for this option is comma separated dictionary of object # names in between ``{`` and ``}``. (dict value) host_rh_failure_recovery_tasks = main:['prepare_HA_enabled_instances_task', 'evacuate_instances_task'],post:[],pre:['disable_compute_service_task'] # # This option allows operator to customize tasks to be executed for instance # failure recovery workflow. # # Provide list of strings reflecting to the task classes that should be included # to the instance failure recovery workflow. The full classname path of all task # classes should be defined in the 'masakari.task_flow.tasks' of setup.cfg and # these classes may be implemented by OpenStack Masaskari project team, deployer # or third party. # # By default below three tasks will be part of this config option:- # 1. stop_instance_task # 2. start_instance_task # 3. confirm_instance_active_task # # The allowed values for this option is comma separated dictionary of object # names in between ``{`` and ``}``. 
(dict value) instance_failure_recovery_tasks = main:['start_instance_task'],post:['confirm_instance_active_task'],pre:['stop_instance_task'] # # This option allows operator to customize tasks to be executed for process # failure recovery workflow. # # Provide list of strings reflecting to the task classes that should be included # to the process failure recovery workflow. The full classname path of all task # classes should be defined in the 'masakari.task_flow.tasks' of setup.cfg and # these classes may be implemented by OpenStack Masaskari project team, deployer # or third party. # # By default below two tasks will be part of this config option:- # 1. disable_compute_node_task # 2. confirm_compute_node_disabled_task # # The allowed values for this option is comma separated dictionary of object # names in between ``{`` and ``}``. (dict value) process_failure_recovery_tasks = main:['confirm_compute_node_disabled_task'],post:[],pre:['disable_compute_node_task'] ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/etc/masakari/masakari-customized-recovery-flow-config-generator.conf0000664000175100017510000000021315033036143030640 0ustar00mylesmyles[DEFAULT] wrap_width = 80 output_file = etc/masakari/masakari-custom-recovery-methods.conf.sample namespace = customized_recovery_flow_opts././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/etc/masakari/masakari-policy-generator.conf0000664000175100017510000000011515033036143023706 0ustar00mylesmyles[DEFAULT] output_file = etc/masakari/policy.yaml.sample namespace = masakari ././@PaxHeader0000000000000000000000000000003200000000000010210 xustar0026 mtime=1751923813.52571 masakari-19.1.0.dev18/masakari/0000775000175100017510000000000015033036146015221 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 
masakari-19.1.0.dev18/masakari/__init__.py0000664000175100017510000000211215033036143017323 0ustar00mylesmyles# Copyright (c) 2016 NTT Data # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ :mod:`masakari` -- Cloud IaaS Platform =================================== .. automodule:: masakari :platform: Unix :synopsis: Infrastructure-as-a-Service Cloud platform. """ import os os.environ['EVENTLET_NO_GREENDNS'] = 'yes' # NOTE(rpodolyaka): import oslo_service first, so that it makes eventlet hub # use a monotonic clock to avoid issues with drifts of system time (see # LP 1510234 for details) import oslo_service # noqa import eventlet # noqa ././@PaxHeader0000000000000000000000000000003200000000000010210 xustar0026 mtime=1751923813.52571 masakari-19.1.0.dev18/masakari/api/0000775000175100017510000000000015033036146015772 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/api/__init__.py0000664000175100017510000000000015033036143020066 0ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/api/api_version_request.py0000664000175100017510000001510315033036143022427 0ustar00mylesmyles# Copyright 2016 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import re from masakari import exception from masakari.i18n import _ # Define the minimum and maximum version of the API across all of the # REST API. The format of the version is: # X.Y where: # # - X will only be changed if a significant backwards incompatible API # change is made which affects the API as whole. That is, something # that is only very very rarely incremented. # # - Y when you make any change to the API. Note that this includes # semantic changes which may not affect the input or output formats or # even originate in the API code layer. We are not distinguishing # between backwards compatible and backwards incompatible changes in # the versioning system. It must be made clear in the documentation as # to what is a backwards compatible change and what is a backwards # incompatible one. # # You must update the API version history string below with a one or # two line description as well as update rest_api_version_history.rst REST_API_VERSION_HISTORY = """REST API Version History: * 1.0 - Initial version. * 1.1 - Add support for getting notification progress details. * 1.2 - Add enabled option to segment. * 1.3 - Add masakari vmoves. """ # The minimum and maximum versions of the API supported # The default api version request is defined to be the # the minimum version of the API supported. # Note: This only applies for the v1 API once microversions # support is fully merged. 
_MIN_API_VERSION = "1.0" _MAX_API_VERSION = "1.3" # The default api version request if none is requested in the headers DEFAULT_API_VERSION = _MIN_API_VERSION # NOTE: min and max versions declared as functions so we can # mock them for unittests. Do not use the constants directly anywhere # else. def min_api_version(): return APIVersionRequest(_MIN_API_VERSION) def max_api_version(): return APIVersionRequest(_MAX_API_VERSION) def is_supported(req, min_version=_MIN_API_VERSION, max_version=_MAX_API_VERSION): """Check if API request version satisfies version restrictions. :param req: request object :param min_version: minimal version of API needed for correct request processing :param max_version: maximum version of API needed for correct request processing :returns True if request satisfies minimal and maximum API version requirements. False in other case. """ return (APIVersionRequest(max_version) >= req.api_version_request >= APIVersionRequest(min_version)) class APIVersionRequest(object): """This class represents an API Version Request with convenience methods for manipulation and comparison of version numbers that we need to do to implement microversions. """ def __init__(self, version_string=None): """Create an API version request object. :param version_string: String representation of APIVersionRequest. Correct format is 'X.Y', where 'X' and 'Y' are int values. 
None value should be used to create Null APIVersionRequest, which is equal to 0.0 """ self.ver_major = 0 self.ver_minor = 0 if version_string is not None: match = re.match(r"^([1-9]\d*)\.([1-9]\d*|0)$", version_string) if match: self.ver_major = int(match.group(1)) self.ver_minor = int(match.group(2)) else: raise exception.InvalidAPIVersionString(version=version_string) def __str__(self): """Debug/Logging representation of object.""" return ("API Version Request Major: %s, Minor: %s" % (self.ver_major, self.ver_minor)) def is_null(self): return self.ver_major == 0 and self.ver_minor == 0 def _format_type_error(self, other): return TypeError(_("'%(other)s' should be an instance of '%(cls)s'") % {"other": other, "cls": self.__class__}) def __lt__(self, other): if not isinstance(other, APIVersionRequest): raise self._format_type_error(other) return ((self.ver_major, self.ver_minor) < (other.ver_major, other.ver_minor)) def __eq__(self, other): if not isinstance(other, APIVersionRequest): raise self._format_type_error(other) return ((self.ver_major, self.ver_minor) == (other.ver_major, other.ver_minor)) def __gt__(self, other): if not isinstance(other, APIVersionRequest): raise self._format_type_error(other) return ((self.ver_major, self.ver_minor) > (other.ver_major, other.ver_minor)) def __le__(self, other): return self < other or self == other def __ne__(self, other): return not self.__eq__(other) def __ge__(self, other): return self > other or self == other def matches(self, min_version, max_version): """Returns whether the version object represents a version greater than or equal to the minimum version and less than or equal to the maximum version. @param min_version: Minimum acceptable version. @param max_version: Maximum acceptable version. @returns: boolean If min_version is null then there is no minimum limit. If max_version is null then there is no maximum limit. 
If self is null then raise ValueError """ if self.is_null(): raise ValueError if max_version.is_null() and min_version.is_null(): return True elif max_version.is_null(): return min_version <= self elif min_version.is_null(): return self <= max_version else: return min_version <= self <= max_version def get_string(self): """Converts object to string representation which if used to create an APIVersionRequest object results in the same version request. """ if self.is_null(): raise ValueError return "%s.%s" % (self.ver_major, self.ver_minor) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/api/auth.py0000664000175100017510000001363515033036143017312 0ustar00mylesmyles# Copyright (c) 2016 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Common Auth Middleware. 
""" from oslo_log import log as logging from oslo_middleware import request_id from oslo_serialization import jsonutils import webob.dec import webob.exc from masakari.api import wsgi import masakari.conf from masakari import context from masakari.i18n import _ CONF = masakari.conf.CONF LOG = logging.getLogger(__name__) def _load_pipeline(loader, pipeline): filters = [loader.get_filter(n) for n in pipeline[:-1]] app = loader.get_app(pipeline[-1]) filters.reverse() for filter in filters: app = filter(app) return app def pipeline_factory_v1(loader, global_conf, **local_conf): """A paste pipeline replica that keys off of auth_strategy.""" return _load_pipeline(loader, local_conf[CONF.auth_strategy].split()) class InjectContext(wsgi.Middleware): """Add a 'masakari.context' to WSGI environ.""" def __init__(self, context, *args, **kwargs): self.context = context super(InjectContext, self).__init__(*args, **kwargs) @webob.dec.wsgify(RequestClass=wsgi.Request) def __call__(self, req): req.environ['masakari.context'] = self.context return self.application class MasakariKeystoneContext(wsgi.Middleware): """Make a request context from keystone headers.""" @webob.dec.wsgify(RequestClass=wsgi.Request) def __call__(self, req): user_id = req.headers.get('X_USER') user_id = req.headers.get('X_USER_ID', user_id) if user_id is None: LOG.debug("Neither X_USER_ID nor X_USER found in request") return webob.exc.HTTPUnauthorized() roles = self._get_roles(req) if 'X_TENANT_ID' in req.headers: # This is the new header since Keystone went to ID/Name project_id = req.headers['X_TENANT_ID'] else: # This is for legacy compatibility project_id = req.headers['X_TENANT'] project_name = req.headers.get('X_TENANT_NAME') user_name = req.headers.get('X_USER_NAME') req_id = req.environ.get(request_id.ENV_REQUEST_ID) # Get the auth token auth_token = req.headers.get('X_AUTH_TOKEN', req.headers.get('X_STORAGE_TOKEN')) # Build a context, including the auth_token... 
remote_address = req.remote_addr if CONF.use_forwarded_for: remote_address = req.headers.get('X-Forwarded-For', remote_address) service_catalog = None if req.headers.get('X_SERVICE_CATALOG') is not None: try: catalog_header = req.headers.get('X_SERVICE_CATALOG') service_catalog = jsonutils.loads(catalog_header) except ValueError: raise webob.exc.HTTPInternalServerError( _('Invalid service catalog json.')) # NOTE: This is a full auth plugin set by auth_token # middleware in newer versions. user_auth_plugin = req.environ.get('keystone.token_auth') ctx = context.RequestContext(user_id, project_id, user_name=user_name, project_name=project_name, roles=roles, auth_token=auth_token, remote_address=remote_address, service_catalog=service_catalog, request_id=req_id, user_auth_plugin=user_auth_plugin) req.environ['masakari.context'] = ctx return self.application def _get_roles(self, req): """Get the list of roles.""" roles = req.headers.get('X_ROLES', '') return [r.strip() for r in roles.split(',')] class NoAuthMiddleware(wsgi.Middleware): """Return a fake token if one isn't specified. noauth2 provides admin privs if 'admin' is provided as the user id. 
""" @webob.dec.wsgify(RequestClass=wsgi.Request) def __call__(self, req): user_id = req.headers.get('X_USER', 'admin') user_id = req.headers.get('X_USER_ID', user_id) project_name = req.headers.get('X_TENANT_NAME') user_name = req.headers.get('X_USER_NAME') req_id = req.environ.get(request_id.ENV_REQUEST_ID) remote_address = req.remote_addr if CONF.use_forwarded_for: remote_address = req.headers.get('X-Forwarded-For', remote_address) service_catalog = None if req.headers.get('X_SERVICE_CATALOG') is not None: try: catalog_header = req.headers.get('X_SERVICE_CATALOG') service_catalog = jsonutils.loads(catalog_header) except ValueError: raise webob.exc.HTTPInternalServerError( _('Invalid service catalog json.')) ctx = context.RequestContext(user_id, user_name=user_name, project_name=project_name, remote_address=remote_address, service_catalog=service_catalog, request_id=req_id, is_admin=True) req.environ['masakari.context'] = ctx return self.application ././@PaxHeader0000000000000000000000000000003200000000000010210 xustar0026 mtime=1751923813.52571 masakari-19.1.0.dev18/masakari/api/openstack/0000775000175100017510000000000015033036146017761 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/api/openstack/__init__.py0000664000175100017510000002366415033036143022102 0ustar00mylesmyles# Copyright (c) 2016 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
""" WSGI middleware for OpenStack API controllers. """ from oslo_log import log as logging import routes import stevedore import webob.dec import webob.exc from masakari.api.openstack import wsgi from masakari.api import wsgi as base_wsgi import masakari.conf from masakari.i18n import translate from masakari import utils LOG = logging.getLogger(__name__) CONF = masakari.conf.CONF class FaultWrapper(base_wsgi.Middleware): """Calls down the middleware stack, making exceptions into faults.""" _status_to_type = {} @staticmethod def status_to_type(status): if not FaultWrapper._status_to_type: for clazz in utils.walk_class_hierarchy(webob.exc.HTTPError): FaultWrapper._status_to_type[clazz.code] = clazz return FaultWrapper._status_to_type.get( status, webob.exc.HTTPInternalServerError)() def _error(self, inner, req): LOG.exception("Caught error: %s", str(inner)) safe = getattr(inner, 'safe', False) headers = getattr(inner, 'headers', None) status = getattr(inner, 'code', 500) if status is None: status = 500 msg_dict = dict(url=req.url, status=status) LOG.info("%(url)s returned with HTTP %(status)d", msg_dict) outer = self.status_to_type(status) if headers: outer.headers = headers if safe: user_locale = req.best_match_language() inner_msg = translate(inner.message, user_locale) outer.explanation = '%s: %s' % (inner.__class__.__name__, inner_msg) return wsgi.Fault(outer) @webob.dec.wsgify(RequestClass=wsgi.Request) def __call__(self, req): try: return req.get_response(self.application) except Exception as ex: return self._error(ex, req) class APIMapper(routes.Mapper): def routematch(self, url=None, environ=None): if url == "": result = self._match("", environ) return result[0], result[1] return routes.Mapper.routematch(self, url, environ) def connect(self, *args, **kargs): kargs.setdefault('requirements', {}) if not kargs['requirements'].get('format'): kargs['requirements']['format'] = 'json|xml' return routes.Mapper.connect(self, *args, **kargs) class 
ProjectMapper(APIMapper): def resource(self, member_name, collection_name, **kwargs): # NOTE(abhishekk): project_id parameter is only valid if its hex # or hex + dashes (note, integers are a subset of this). This # is required to hand our overlaping routes issues. project_id_regex = r'[0-9a-f\-]+' if CONF.osapi_v1.project_id_regex: project_id_regex = CONF.osapi_v1.project_id_regex project_id_token = '{project_id:%s}' % project_id_regex if 'parent_resource' not in kwargs: kwargs['path_prefix'] = '%s/' % project_id_token else: parent_resource = kwargs['parent_resource'] p_collection = parent_resource['collection_name'] p_member = parent_resource['member_name'] kwargs['path_prefix'] = '%s/%s/:%s_id' % ( project_id_token, p_collection, p_member) routes.Mapper.resource( self, member_name, collection_name, **kwargs) # while we are in transition mode, create additional routes # for the resource that do not include project_id. if 'parent_resource' not in kwargs: del kwargs['path_prefix'] else: parent_resource = kwargs['parent_resource'] p_collection = parent_resource['collection_name'] p_member = parent_resource['member_name'] kwargs['path_prefix'] = '%s/:%s_id' % (p_collection, p_member) routes.Mapper.resource(self, member_name, collection_name, **kwargs) class PlainMapper(APIMapper): def resource(self, member_name, collection_name, **kwargs): if 'parent_resource' in kwargs: parent_resource = kwargs['parent_resource'] p_collection = parent_resource['collection_name'] p_member = parent_resource['member_name'] kwargs['path_prefix'] = '%s/:%s_id' % (p_collection, p_member) routes.Mapper.resource(self, member_name, collection_name, **kwargs) class APIRouterV1(base_wsgi.Router): """Routes requests on the OpenStack v1 API to the appropriate controller and method. """ @classmethod def factory(cls, global_config, **local_config): """Simple paste factory :class:`masakari.api.wsgi.Router` doesn't have one. 
""" return cls() @staticmethod def api_extension_namespace(): return 'masakari.api.v1.extensions' def __init__(self, init_only=None): def _check_load_extension(ext): return self._register_extension(ext) self.api_extension_manager = stevedore.enabled.EnabledExtensionManager( namespace=self.api_extension_namespace(), check_func=_check_load_extension, invoke_on_load=True, invoke_kwds={"extension_info": self.loaded_extension_info}) mapper = ProjectMapper() self.resources = {} if list(self.api_extension_manager): self._register_resources_check_inherits(mapper) self.api_extension_manager.map(self._register_controllers) LOG.info("Loaded extensions: %s", sorted(self.loaded_extension_info.get_extensions().keys())) super(APIRouterV1, self).__init__(mapper) def _register_resources_list(self, ext_list, mapper): for ext in ext_list: self._register_resources(ext, mapper) def _register_resources_check_inherits(self, mapper): ext_has_inherits = [] ext_no_inherits = [] for ext in self.api_extension_manager: for resource in ext.obj.get_resources(): if resource.inherits: ext_has_inherits.append(ext) break else: ext_no_inherits.append(ext) self._register_resources_list(ext_no_inherits, mapper) self._register_resources_list(ext_has_inherits, mapper) @property def loaded_extension_info(self): raise NotImplementedError() def _register_extension(self, ext): raise NotImplementedError() def _register_resources(self, ext, mapper): """Register resources defined by the extensions Extensions define what resources they want to add through a get_resources function """ handler = ext.obj LOG.debug("Running _register_resources on %s", ext.obj) for resource in handler.get_resources(): LOG.debug('Extended resource: %s', resource.collection) inherits = None if resource.inherits: inherits = self.resources.get(resource.inherits) if not resource.controller: resource.controller = inherits.controller wsgi_resource = wsgi.ResourceV1(resource.controller, inherits=inherits) self.resources[resource.collection] 
= wsgi_resource kargs = dict( controller=wsgi_resource, collection=resource.collection_actions, member=resource.member_actions) if resource.parent: kargs['parent_resource'] = resource.parent # non core-API plugins use the collection name as the # member name, but the core-API plugins use the # singular/plural convention for member/collection names if resource.member_name: member_name = resource.member_name else: member_name = resource.collection mapper.resource(member_name, resource.collection, **kargs) if resource.custom_routes_fn: resource.custom_routes_fn(mapper, wsgi_resource) def _register_controllers(self, ext): """Register controllers defined by the extensions Extensions define what resources they want to add through a get_controller_extensions function """ handler = ext.obj LOG.debug("Running _register_controllers on %s", ext.obj) for extension in handler.get_controller_extensions(): ext_name = extension.extension.name collection = extension.collection controller = extension.controller if collection not in self.resources: LOG.warning('Extension %(ext_name)s: Cannot extend ' 'resource %(collection)s: No such resource', {'ext_name': ext_name, 'collection': collection}) continue LOG.debug('Extension %(ext_name)s extending resource: ' '%(collection)s', {'ext_name': ext_name, 'collection': collection}) resource = self.resources[collection] resource.register_actions(controller) resource.register_extensions(controller) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/api/openstack/common.py0000664000175100017510000002213615033036143021624 0ustar00mylesmyles# Copyright (c) 2016 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import re from oslo_log import log as logging from urllib import parse as urlparse import webob import masakari.conf from masakari import exception from masakari.i18n import _ from masakari import utils CONF = masakari.conf.CONF LOG = logging.getLogger(__name__) def remove_trailing_version_from_href(href): """Removes the api version from the href. Given: 'http://www.masakari.com/ha/v1.1' Returns: 'http://www.masakari.com/ha' Given: 'http://www.masakari.com/v1.1' Returns: 'http://www.masakari.com' """ parsed_url = urlparse.urlsplit(href) url_parts = parsed_url.path.rsplit('/', 1) # NOTE: this should match vX.X or vX expression = re.compile(r'^v([0-9]+|[0-9]+\.[0-9]+)(/.*|$)') if not expression.match(url_parts.pop()): LOG.debug('href %s does not contain version', href) raise ValueError(_('href %s does not contain version') % href) new_path = url_join(*url_parts) parsed_url = list(parsed_url) parsed_url[2] = new_path return urlparse.urlunsplit(parsed_url) def url_join(*parts): """Convenience method for joining parts of a URL Any leading and trailing '/' characters are removed, and the parts joined together with '/' as a separator. If last element of 'parts' is an empty string, the returned URL will have a trailing slash. 
""" parts = parts or [""] clean_parts = [part.strip("/") for part in parts if part] if not parts[-1]: # Empty last element should add a trailing slash clean_parts.append("") return "/".join(clean_parts) class ViewBuilder(object): """Model API responses as dictionaries.""" def _get_project_id(self, request): """Get project id from request url if present or empty string otherwise """ project_id = request.environ["masakari.context"].project_id if project_id in request.url: return project_id return '' def _get_links(self, request, identifier, collection_name): return [ { "rel": "self", "href": self._get_href_link(request, identifier, collection_name), }, { "rel": "bookmark", "href": self._get_bookmark_link(request, identifier, collection_name), } ] def _get_next_link(self, request, identifier, collection_name): """Return href string with proper limit and marker params.""" params = request.params.copy() params["marker"] = identifier prefix = self._update_masakari_link_prefix(request.application_url) url = url_join(prefix, self._get_project_id(request), collection_name) return "%s?%s" % (url, urlparse.urlencode(params)) def _get_href_link(self, request, identifier, collection_name): """Return an href string pointing to this object.""" prefix = self._update_masakari_link_prefix(request.application_url) return url_join(prefix, self._get_project_id(request), collection_name, str(identifier)) def _get_bookmark_link(self, request, identifier, collection_name): """Create a URL that refers to a specific resource.""" base_url = remove_trailing_version_from_href(request.application_url) base_url = self._update_masakari_link_prefix(base_url) return url_join(base_url, self._get_project_id(request), collection_name, str(identifier)) def _get_collection_links(self, request, items, collection_name, id_key="uuid"): """Retrieve 'next' link, if applicable. This is included if: 1) 'limit' param is specified and equals the number of items. 
2) 'limit' param is specified but it exceeds CONF.osapi_max_limit, in this case the number of items is CONF.osapi_max_limit. 3) 'limit' param is NOT specified but the number of items is CONF.osapi_max_limit. """ links = [] max_items = min( int(request.params.get("limit", CONF.osapi_max_limit)), CONF.osapi_max_limit) if max_items and max_items == len(items): last_item = items[-1] if id_key in last_item: last_item_id = last_item[id_key] elif 'id' in last_item: last_item_id = last_item["id"] else: last_item_id = last_item["flavorid"] links.append({ "rel": "next", "href": self._get_next_link(request, last_item_id, collection_name), }) return links def _update_link_prefix(self, orig_url, prefix): if not prefix: return orig_url url_parts = list(urlparse.urlsplit(orig_url)) prefix_parts = list(urlparse.urlsplit(prefix)) url_parts[0:2] = prefix_parts[0:2] url_parts[2] = prefix_parts[2] + url_parts[2] return urlparse.urlunsplit(url_parts).rstrip('/') def _update_masakari_link_prefix(self, orig_url): return self._update_link_prefix(orig_url, CONF.osapi_masakari_link_prefix) def _get_int_param(request, param): """Extract integer param from request or fail.""" try: int_param = utils.validate_integer(request.GET[param], param, min_value=0) except exception.InvalidInput as e: raise webob.exc.HTTPBadRequest(explanation=e.format_message()) return int_param def _get_marker_param(request): """Extract marker id from request or fail.""" return request.GET['marker'] def get_pagination_params(request): """Return marker, limit tuple from request. :param request: `wsgi.Request` possibly containing 'marker' and 'limit' GET variables. 'marker' is the id of the last element the client has seen, and 'limit' is the maximum number of items to return. If 'limit' is not specified, 0, or > max_limit, we default to max_limit. Negative values for either marker or limit will cause exc.HTTPBadRequest() exceptions to be raised. 
""" params = {} if 'limit' in request.GET: params['limit'] = _get_int_param(request, 'limit') if 'page_size' in request.GET: params['page_size'] = _get_int_param(request, 'page_size') if 'marker' in request.GET: params['marker'] = _get_marker_param(request) return params def get_limit_and_marker(request, max_limit=CONF.osapi_max_limit): """get limited parameter from request.""" params = get_pagination_params(request) limit = params.get('limit', max_limit) limit = min(max_limit, limit) marker = params.get('marker') return limit, marker def get_sort_params(input_params, default_key='created_at', default_dir='desc'): """Retrieves sort keys/directions parameters. Processes the parameters to create a list of sort keys and sort directions that correspond to the 'sort_key' and 'sort_dir' parameter values. These sorting parameters can be specified multiple times in order to generate the list of sort keys and directions. The input parameters are not modified. :param input_params: webob.multidict of request parameters (from masakari.api.wsgi.Request.params) :param default_key: default sort key value, added to the list if no 'sort_key' parameters are supplied :param default_dir: default sort dir value, added to the list if no 'sort_dir' parameters are supplied :returns: list of sort keys, list of sort dirs """ params = input_params.copy() sort_keys = [] sort_dirs = [] while 'sort_key' in params: sort_keys.append(params.pop('sort_key').strip()) while 'sort_dir' in params: sort_dirs.append(params.pop('sort_dir').strip()) if len(sort_keys) == 0 and default_key: sort_keys.append(default_key) if len(sort_dirs) == 0 and default_dir: sort_dirs.append(default_dir) return sort_keys, sort_dirs ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/api/openstack/extensions.py0000664000175100017510000003233315033036143022533 0ustar00mylesmyles# Copyright (c) 2016 NTT DATA # # Licensed under the Apache License, Version 
2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import abc import functools import os from oslo_log import log as logging from oslo_utils import importutils import webob.dec import webob.exc from masakari.api.openstack import wsgi from masakari import exception from masakari.i18n import _ LOG = logging.getLogger(__name__) class ExtensionDescriptor(object): """Base class that defines the contract for extensions. Note that you don't have to derive from this class to have a valid extension; it is purely a convenience. """ # The name of the extension, e.g., 'Fox In Socks' name = None # The alias for the extension, e.g., 'FOXNSOX' alias = None # Description comes from the docstring for the class # The timestamp when the extension was last updated, e.g., # '2011-01-22T19:25:27Z' updated = None def __init__(self, ext_mgr): """Register extension with the extension manager.""" ext_mgr.register(self) self.ext_mgr = ext_mgr def get_resources(self): """List of extensions.ResourceExtension extension objects. Resources define new nouns, and are accessible through URLs. """ resources = [] return resources def get_controller_extensions(self): """List of extensions.ControllerExtension extension objects. Controller extensions are used to extend existing controllers. """ controller_exts = [] return controller_exts def __repr__(self): return "" % ( self.name, self.alias, self.updated) def is_valid(self): """Validate required fields for extensions. 
Raises an attribute error if the attr is not defined """ for attr in ('name', 'alias', 'updated', 'namespace'): if getattr(self, attr) is None: raise AttributeError("%s is None, needs to be defined" % attr) return True class ExtensionsController(wsgi.Resource): def __init__(self, extension_manager): self.extension_manager = extension_manager super(ExtensionsController, self).__init__(None) def _translate(self, ext): ext_data = {'name': ext.name, 'alias': ext.alias, 'description': ext.__doc__, 'namespace': ext.namespace, 'updated': ext.updated, 'links': []} return ext_data def index(self, req): extensions = [] for ext in self.extension_manager.sorted_extensions(): extensions.append(self._translate(ext)) return dict(extensions=extensions) def show(self, req, id): try: ext = self.extension_manager.extensions[id] except KeyError: raise webob.exc.HTTPNotFound() return dict(extension=self._translate(ext)) def delete(self, req, id): raise webob.exc.HTTPNotFound() def create(self, req, body): raise webob.exc.HTTPNotFound() class ExtensionManager(object): """Load extensions from the configured extension path.""" def sorted_extensions(self): if self.sorted_ext_list is None: self.sorted_ext_list = sorted(self.extensions.items()) for _alias, ext in self.sorted_ext_list: yield ext def is_loaded(self, alias): return alias in self.extensions def register(self, ext): # Do nothing if the extension doesn't check out if not self._check_extension(ext): return alias = ext.alias if alias in self.extensions: raise exception.MasakariException( "Found duplicate extension: %s" % alias) self.extensions[alias] = ext self.sorted_ext_list = None def get_resources(self): """Returns a list of ResourceExtension objects.""" resources = [] resources.append(ResourceExtension('extensions', ExtensionsController(self))) for ext in self.sorted_extensions(): try: resources.extend(ext.get_resources()) except AttributeError: pass return resources def get_controller_extensions(self): """Returns a list of 
ControllerExtension objects.""" controller_exts = [] for ext in self.sorted_extensions(): try: get_ext_method = ext.get_controller_extensions except AttributeError: continue controller_exts.extend(get_ext_method()) return controller_exts def _check_extension(self, extension): """Checks for required methods in extension objects.""" try: extension.is_valid() except AttributeError: LOG.exception("Exception loading extension") return False return True def load_extension(self, ext_factory): """Execute an extension factory. Loads an extension. The 'ext_factory' is the name of a callable that will be imported and called with one argument--the extension manager. The factory callable is expected to call the register() method at least once. """ LOG.debug("Loading extension %s", ext_factory) if isinstance(ext_factory, str): # Load the factory factory = importutils.import_class(ext_factory) else: factory = ext_factory # Call it LOG.debug("Calling extension factory %s", ext_factory) factory(self) def _load_extensions(self): """Load extensions specified on the command line.""" extensions = list(self.cls_list) for ext_factory in extensions: try: self.load_extension(ext_factory) except Exception as exc: LOG.warning('Failed to load extension %(ext_factory)s: ' '%(exc)s', {'ext_factory': ext_factory, 'exc': exc}) class ControllerExtension(object): """Extend core controllers of masakari OpenStack API. Provide a way to extend existing masakari OpenStack API core controllers. 
""" def __init__(self, extension, collection, controller): self.extension = extension self.collection = collection self.controller = controller class ResourceExtension(object): """Add top level resources to the OpenStack API in masakari.""" def __init__(self, collection, controller=None, parent=None, collection_actions=None, member_actions=None, custom_routes_fn=None, inherits=None, member_name=None): if not collection_actions: collection_actions = {} if not member_actions: member_actions = {} self.collection = collection self.controller = controller self.parent = parent self.collection_actions = collection_actions self.member_actions = member_actions self.custom_routes_fn = custom_routes_fn self.inherits = inherits self.member_name = member_name def load_standard_extensions(ext_mgr, logger, path, package, ext_list=None): """Registers all standard API extensions.""" # Walk through all the modules in our directory... our_dir = path[0] for dirpath, dirnames, filenames in os.walk(our_dir): # Compute the relative package name from the dirpath relpath = os.path.relpath(dirpath, our_dir) if relpath == '.': relpkg = '' else: relpkg = '.%s' % '.'.join(relpath.split(os.sep)) # Now, consider each file in turn, only considering .py files for fname in filenames: root, ext = os.path.splitext(fname) # Skip __init__ and anything that's not .py if ext != '.py' or root == '__init__': continue # Try loading it classname = "%s%s" % (root[0].upper(), root[1:]) classpath = ("%s%s.%s.%s" % (package, relpkg, root, classname)) if ext_list is not None and classname not in ext_list: logger.debug("Skipping extension: %s" % classpath) continue try: ext_mgr.load_extension(classpath) except Exception as exc: logger.warn('Failed to load extension %(classpath)s: %(exc)s', {'classpath': classpath, 'exc': exc}) # Now, let's consider any subdirectories we may have... 
subdirs = [] for dname in dirnames: # Skip it if it does not have __init__.py if not os.path.exists(os.path.join(dirpath, dname, '__init__.py')): continue # If it has extension(), delegate... ext_name = "%s%s.%s.extension" % (package, relpkg, dname) try: ext = importutils.import_class(ext_name) except ImportError: # extension() doesn't exist on it, so we'll explore # the directory for ourselves subdirs.append(dname) else: try: ext(ext_mgr) except Exception as exc: logger.warn('Failed to load extension %(ext_name)s:' '%(exc)s', {'ext_name': ext_name, 'exc': exc}) # Update the list of directories we'll explore... # using os.walk 'the caller can modify the dirnames list in-place, # and walk() will only recurse into the subdirectories whose names # remain in dirnames' # https://docs.python.org/2/library/os.html#os.walk dirnames[:] = subdirs class V1APIExtensionBase(object, metaclass=abc.ABCMeta): """Abstract base class for all v1 API extensions. All v1 API extensions must derive from this class and implement the abstract methods get_resources and get_controller_extensions even if they just return an empty list. The extensions must also define the abstract properties. """ def __init__(self, extension_info): self.extension_info = extension_info @abc.abstractmethod def get_resources(self): """Return a list of resources extensions. The extensions should return a list of ResourceExtension objects. This list may be empty. """ pass @abc.abstractmethod def get_controller_extensions(self): """Return a list of controller extensions. The extensions should return a list of ControllerExtension objects. This list may be empty. 
""" pass @property @abc.abstractmethod def name(self): """Name of the extension.""" pass @property @abc.abstractmethod def alias(self): """Alias for the extension.""" pass @property @abc.abstractmethod def version(self): """Version of the extension.""" pass def __repr__(self): return "" % ( self.name, self.alias, self.version) def is_valid(self): """Validate required fields for extensions. Raises an attribute error if the attr is not defined """ for attr in ('name', 'alias', 'version'): if getattr(self, attr) is None: raise AttributeError("%s is None, needs to be defined" % attr) return True def expected_errors(errors): """Decorator for v1 API methods which specifies expected exceptions. Specify which exceptions may occur when an API method is called. If an unexpected exception occurs then return a 500 instead and ask the user of the API to file a bug report. """ def decorator(f): @functools.wraps(f) def wrapped(*args, **kwargs): try: return f(*args, **kwargs) except Exception as exc: if isinstance(exc, webob.exc.WSGIHTTPException): if isinstance(errors, int): t_errors = (errors,) else: t_errors = errors if exc.code in t_errors: raise elif isinstance(exc, exception.Forbidden): raise elif isinstance(exc, exception.ValidationError): raise LOG.exception("Unexpected exception in API method") msg = _('Unexpected API Error. 
Please report this at ' 'https://bugs.launchpad.net/masakari/ and attach the ' 'Masakari API log if possible.\n%s') % type(exc) raise webob.exc.HTTPInternalServerError(explanation=msg) return wrapped return decorator ././@PaxHeader0000000000000000000000000000003200000000000010210 xustar0026 mtime=1751923813.52671 masakari-19.1.0.dev18/masakari/api/openstack/ha/0000775000175100017510000000000015033036146020351 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/api/openstack/ha/__init__.py0000664000175100017510000000247615033036143022470 0ustar00mylesmyles# Copyright (c) 2016 NTT Data # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ WSGI middleware for OpenStack Masakari API. """ import masakari.api.openstack from masakari.api.openstack.ha import extension_info import masakari.conf CONF = masakari.conf.CONF class APIRouterV1(masakari.api.openstack.APIRouterV1): """Routes requests on the OpenStack API to the appropriate controller and method. 
""" def __init__(self, init_only=None): self._loaded_extension_info = extension_info.LoadedExtensionInfo() super(APIRouterV1, self).__init__(init_only) def _register_extension(self, ext): return self.loaded_extension_info.register_extension(ext.obj) @property def loaded_extension_info(self): return self._loaded_extension_info ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/api/openstack/ha/extension_info.py0000664000175100017510000001044015033036143023746 0ustar00mylesmyles# Copyright (c) 2016 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from http import HTTPStatus from oslo_log import log as logging import webob.exc from masakari.api.openstack import extensions from masakari.api.openstack import wsgi from masakari import exception from masakari.policies import base as base_policies from masakari.policies import extension_info as extension_policies ALIAS = 'extensions' LOG = logging.getLogger(__name__) class FakeExtension(object): def __init__(self, name, alias, description=""): self.name = name self.alias = alias self.__doc__ = description self.version = -1 class ExtensionInfoController(wsgi.Controller): def __init__(self, extension_info): self.extension_info = extension_info def _translate(self, ext): ext_data = {"name": ext.name, "alias": ext.alias, "description": ext.__doc__, "namespace": "", "updated": "", "links": []} return ext_data def _create_fake_ext(self, name, alias, description=""): return FakeExtension(name, alias, description) def _get_extensions(self, context): """Filter extensions list based on policy.""" discoverable_extensions = dict() for alias, ext in self.extension_info.get_extensions().items(): action = ':'.join([ base_policies.MASAKARI_API, alias, 'discoverable']) if context.can(action, fatal=False): discoverable_extensions[alias] = ext else: LOG.debug("Filter out extension %s from discover list", alias) return discoverable_extensions @extensions.expected_errors(()) def index(self, req): context = req.environ['masakari.context'] context.can(extension_policies.EXTENSIONS % 'index') discoverable_extensions = self._get_extensions(context) sorted_ext_list = sorted(discoverable_extensions.items()) extensions = [] for _alias, ext in sorted_ext_list: extensions.append(self._translate(ext)) return dict(extensions=extensions) @extensions.expected_errors(HTTPStatus.NOT_FOUND) def show(self, req, id): context = req.environ['masakari.context'] context.can(extension_policies.EXTENSIONS % 'detail') try: ext = self._get_extensions(context)[id] except KeyError: raise 
webob.exc.HTTPNotFound() return dict(extension=self._translate(ext)) class ExtensionInfo(extensions.V1APIExtensionBase): """Extension information.""" name = "Extensions" alias = ALIAS version = 1 def get_resources(self): resources = [ extensions.ResourceExtension( ALIAS, ExtensionInfoController(self.extension_info), member_name='extension')] return resources def get_controller_extensions(self): return [] class LoadedExtensionInfo(object): """Keep track of all loaded API extensions.""" def __init__(self): self.extensions = {} def register_extension(self, ext): if not self._check_extension(ext): return False alias = ext.alias if alias in self.extensions: raise exception.MasakariException( "Found duplicate extension: %s" % alias) self.extensions[alias] = ext return True def _check_extension(self, extension): """Checks for required methods in extension objects.""" try: extension.is_valid() except AttributeError: LOG.exception("Exception loading extension") return False return True def get_extensions(self): return self.extensions ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/api/openstack/ha/hosts.py0000664000175100017510000001722215033036143022064 0ustar00mylesmyles# Copyright (c) 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
"""The Host API extension.""" from http import HTTPStatus from oslo_utils import strutils from webob import exc from masakari.api.openstack import common from masakari.api.openstack import extensions from masakari.api.openstack.ha.schemas import hosts as schema from masakari.api.openstack.ha.views import hosts as views_hosts from masakari.api.openstack import wsgi from masakari.api import validation from masakari import exception from masakari.ha import api as host_api from masakari.i18n import _ from masakari import objects from masakari.policies import hosts as host_policies ALIAS = "os-hosts" class HostsController(wsgi.Controller): """The Host API controller for the OpenStack API.""" def __init__(self): self.api = host_api.HostAPI() @extensions.expected_errors((HTTPStatus.BAD_REQUEST, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND)) def index(self, req, segment_id): """Returns a list a hosts.""" context = req.environ['masakari.context'] context.can(host_policies.HOSTS % 'index') try: filters = {} limit, marker = common.get_limit_and_marker(req) sort_keys, sort_dirs = common.get_sort_params(req.params) segment = objects.FailoverSegment.get_by_uuid(context, segment_id) filters['failover_segment_id'] = segment.uuid if 'name' in req.params: filters['name'] = req.params['name'] if 'type' in req.params: filters['type'] = req.params['type'] if 'control_attributes' in req.params: filters['control_attributes'] = req.params[ 'control_attributes'] if 'on_maintenance' in req.params: try: filters['on_maintenance'] = strutils.bool_from_string( req.params['on_maintenance'], strict=True) except ValueError as ex: msg = _("Invalid value for on_maintenance: %s") % ex raise exc.HTTPBadRequest(explanation=msg) if 'reserved' in req.params: try: filters['reserved'] = strutils.bool_from_string( req.params['reserved'], strict=True) except ValueError as ex: msg = _("Invalid value for reserved: %s") % ex raise exc.HTTPBadRequest(explanation=msg) hosts = self.api.get_all(context, 
filters=filters, sort_keys=sort_keys, sort_dirs=sort_dirs, limit=limit, marker=marker) except exception.MarkerNotFound as ex: raise exc.HTTPBadRequest(explanation=ex.format_message()) except exception.Invalid as e: raise exc.HTTPBadRequest(explanation=e.format_message()) except exception.FailoverSegmentNotFound as ex: raise exc.HTTPNotFound(explanation=ex.format_message()) builder = views_hosts.get_view_builder(req) return builder.build_hosts(hosts) @wsgi.response(HTTPStatus.CREATED) @extensions.expected_errors((HTTPStatus.BAD_REQUEST, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND, HTTPStatus.CONFLICT)) @validation.schema(schema.create) def create(self, req, segment_id, body): """Creates a host.""" context = req.environ['masakari.context'] context.can(host_policies.HOSTS % 'create') host_data = body.get('host') try: host = self.api.create_host(context, segment_id, host_data) except exception.ComputeNotFoundByName as e: raise exc.HTTPBadRequest(explanation=e.message) except exception.FailoverSegmentNotFound as e: raise exc.HTTPNotFound(explanation=e.format_message()) except exception.HostExists as e: raise exc.HTTPConflict(explanation=e.format_message()) builder = views_hosts.get_view_builder(req) return {'host': builder.build_host(host)} @extensions.expected_errors((HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND)) def show(self, req, segment_id, id): """Shows the details of a host.""" context = req.environ['masakari.context'] context.can(host_policies.HOSTS % 'detail') try: host = self.api.get_host(context, segment_id, id) except exception.HostNotFound as e: raise exc.HTTPNotFound(explanation=e.format_message()) except exception.FailoverSegmentNotFound as e: raise exc.HTTPNotFound(explanation=e.format_message()) builder = views_hosts.get_view_builder(req) return {'host': builder.build_host(host)} @extensions.expected_errors((HTTPStatus.BAD_REQUEST, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND, HTTPStatus.CONFLICT)) @validation.schema(schema.update) def update(self, req, 
segment_id, id, body): """Updates the existing host.""" context = req.environ['masakari.context'] context.can(host_policies.HOSTS % 'update') host_data = body.get('host') try: host = self.api.update_host(context, segment_id, id, host_data) except exception.ComputeNotFoundByName as e: raise exc.HTTPBadRequest(explanation=e.message) except exception.HostNotFound as e: raise exc.HTTPNotFound(explanation=e.format_message()) except exception.FailoverSegmentNotFound as e: raise exc.HTTPNotFound(explanation=e.format_message()) except (exception.HostExists, exception.Conflict) as e: raise exc.HTTPConflict(explanation=e.format_message()) builder = views_hosts.get_view_builder(req) return {'host': builder.build_host(host)} @wsgi.response(HTTPStatus.NO_CONTENT) @extensions.expected_errors((HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND, HTTPStatus.CONFLICT)) def delete(self, req, segment_id, id): """Removes a host by id.""" context = req.environ['masakari.context'] context.can(host_policies.HOSTS % 'delete') try: self.api.delete_host(context, segment_id, id) except exception.FailoverSegmentNotFound as e: raise exc.HTTPNotFound(explanation=e.format_message()) except exception.HostNotFound as e: raise exc.HTTPNotFound(explanation=e.format_message()) except exception.Conflict as e: raise exc.HTTPConflict(explanation=e.format_message()) class Hosts(extensions.V1APIExtensionBase): """Hosts controller""" name = "Hosts" alias = ALIAS version = 1 def get_resources(self): parent = {'member_name': 'segment', 'collection_name': 'segments'} resources = [ extensions.ResourceExtension( 'hosts', HostsController(), parent=parent, member_name='host')] return resources def get_controller_extensions(self): return [] ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/api/openstack/ha/notifications.py0000664000175100017510000001422415033036143023574 0ustar00mylesmyles# Copyright 2016 NTT Data. # All Rights Reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from http import HTTPStatus from oslo_utils import timeutils from webob import exc from masakari.api import api_version_request from masakari.api.openstack import common from masakari.api.openstack import extensions from masakari.api.openstack.ha.schemas import notifications as schema from masakari.api.openstack.ha.schemas import payload as payload_schema from masakari.api.openstack import wsgi from masakari.api import validation from masakari import exception from masakari.ha import api as notification_api from masakari.i18n import _ from masakari.objects import fields from masakari.policies import notifications as notifications_policies ALIAS = 'notifications' class NotificationsController(wsgi.Controller): """Notifications controller for the OpenStack API.""" def __init__(self): self.api = notification_api.NotificationAPI() @validation.schema(payload_schema.create_process_payload) def _validate_process_payload(self, req, body): pass @validation.schema(payload_schema.create_vm_payload) def _validate_vm_payload(self, req, body): pass @validation.schema(payload_schema.create_compute_host_payload) def _validate_comp_host_payload(self, req, body): pass @wsgi.response(HTTPStatus.ACCEPTED) @extensions.expected_errors((HTTPStatus.BAD_REQUEST, HTTPStatus.FORBIDDEN, HTTPStatus.CONFLICT)) @validation.schema(schema.create) def create(self, req, body): """Creates a new notification.""" context = req.environ['masakari.context'] 
context.can(notifications_policies.NOTIFICATIONS % 'create') notification_data = body['notification'] if notification_data['type'] == fields.NotificationType.PROCESS: self._validate_process_payload(req, body=notification_data['payload']) if notification_data['type'] == fields.NotificationType.VM: self._validate_vm_payload(req, body=notification_data['payload']) if notification_data['type'] == fields.NotificationType.COMPUTE_HOST: self._validate_comp_host_payload(req, body=notification_data['payload']) try: notification = self.api.create_notification( context, notification_data) except exception.HostNotFoundByName as err: raise exc.HTTPBadRequest(explanation=err.format_message()) except (exception.DuplicateNotification, exception.HostOnMaintenanceError) as err: raise exc.HTTPConflict(explanation=err.format_message()) return {'notification': notification} @extensions.expected_errors((HTTPStatus.BAD_REQUEST, HTTPStatus.FORBIDDEN)) def index(self, req): """Returns a summary list of notifications.""" context = req.environ['masakari.context'] context.can(notifications_policies.NOTIFICATIONS % 'index') try: limit, marker = common.get_limit_and_marker(req) sort_keys, sort_dirs = common.get_sort_params(req.params) filters = {} if 'status' in req.params: filters['status'] = req.params['status'] if 'source_host_uuid' in req.params: filters['source_host_uuid'] = req.params['source_host_uuid'] if 'type' in req.params: filters['type'] = req.params['type'] if 'generated-since' in req.params: try: parsed = timeutils.parse_isotime( req.params['generated-since']) except ValueError: msg = _('Invalid generated-since value') raise exc.HTTPBadRequest(explanation=msg) filters['generated-since'] = parsed notifications = self.api.get_all(context, filters, sort_keys, sort_dirs, limit, marker) except exception.MarkerNotFound as err: raise exc.HTTPBadRequest(explanation=err.format_message()) except exception.Invalid as err: raise exc.HTTPBadRequest(explanation=err.format_message()) return 
{'notifications': notifications} @extensions.expected_errors((HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND)) def show(self, req, id): """Return data about the given notification id.""" context = req.environ['masakari.context'] context.can(notifications_policies.NOTIFICATIONS % 'detail') try: if api_version_request.is_supported(req, min_version='1.1'): notification = ( self.api.get_notification_recovery_workflow_details( context, id)) else: notification = self.api.get_notification(context, id) except exception.NotificationNotFound as err: raise exc.HTTPNotFound(explanation=err.format_message()) return {'notification': notification} class Notifications(extensions.V1APIExtensionBase): """Notifications support.""" name = "Notifications" alias = ALIAS version = 1 def get_resources(self): member_actions = {'action': 'POST'} resources = [ extensions.ResourceExtension(ALIAS, NotificationsController(), member_name='notification', member_actions=member_actions) ] return resources def get_controller_extensions(self): return [] ././@PaxHeader0000000000000000000000000000003200000000000010210 xustar0026 mtime=1751923813.52671 masakari-19.1.0.dev18/masakari/api/openstack/ha/schemas/0000775000175100017510000000000015033036146021774 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/api/openstack/ha/schemas/__init__.py0000664000175100017510000000000015033036143024070 0ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/api/openstack/ha/schemas/hosts.py0000664000175100017510000000342315033036143023505 0ustar00mylesmyles# Copyright 2016 NTT DATA. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import copy from masakari.api.validation import parameter_types _base = { 'type': 'object', 'properties': { 'host': { 'type': 'object', 'properties': { 'name': parameter_types.name, 'type': parameter_types.type, 'control_attributes': parameter_types.description, 'reserved': parameter_types.boolean, 'on_maintenance': parameter_types.boolean }, 'additionalProperties': False } }, 'required': ['host'], 'additionalProperties': False } create = copy.deepcopy(_base) create['properties']['host']['required'] = ['name', 'type', 'control_attributes'] update = copy.deepcopy(_base) update['properties']['host']['anyOf'] = [{'required': ['name']}, {'required': ['type']}, {'required': ['control_attributes']}, {'required': ['reserved']}, {'required': ['on_maintenance']}, ] ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/api/openstack/ha/schemas/notifications.py0000664000175100017510000000265315033036143025222 0ustar00mylesmyles# Copyright 2016 NTT DATA. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from masakari.api.validation import parameter_types from masakari.objects import fields create = { 'type': 'object', 'properties': { 'notification': { 'type': 'object', 'properties': { 'type': { 'type': 'string', 'enum': fields.NotificationType.ALL, }, 'hostname': parameter_types.hostname, 'generated_time': { 'type': 'string', 'format': 'date-time', }, 'payload': parameter_types.payload, }, 'required': ['type', 'hostname', 'generated_time', 'payload'], 'additionalProperties': False } }, 'required': ['notification'], 'additionalProperties': False } ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/api/openstack/ha/schemas/payload.py0000664000175100017510000000365515033036143024005 0ustar00mylesmyles# Copyright 2018 NTT DATA. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from masakari.objects import fields create_compute_host_payload = { 'type': 'object', 'properties': { 'host_status': { 'enum': fields.HostStatusType.ALL, 'type': 'string'}, 'cluster_status': { 'enum': fields.ClusterStatusType.ALL, 'type': 'string'}, 'event': { 'enum': fields.EventType.ALL, 'type': 'string'}, }, 'required': ['event'], 'additionalProperties': False } create_process_payload = { 'type': 'object', 'properties': { 'process_name': { 'type': 'string', 'minLength': 1, 'maxLength': 4096}, 'event': { 'enum': fields.EventType.ALL, 'type': 'string'}, }, 'required': ['process_name', 'event'], 'additionalProperties': False } create_vm_payload = { 'type': 'object', 'properties': { 'instance_uuid': { 'type': 'string', 'format': 'uuid'}, 'vir_domain_event': { 'type': 'string', 'minLength': 1, 'maxLength': 255}, 'event': { 'type': 'string', 'minLength': 1, 'maxLength': 255}, }, 'required': ['instance_uuid', 'vir_domain_event', 'event'], 'additionalProperties': False } ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/api/openstack/ha/schemas/segments.py0000664000175100017510000000424315033036143024173 0ustar00mylesmyles# Copyright 2016 NTT DATA. # All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import copy from masakari.api.validation import parameter_types _base = { 'type': 'object', 'properties': { 'segment': { 'type': 'object', 'properties': { 'name': parameter_types.name, 'description': parameter_types.description, 'recovery_method': { 'type': 'string', 'enum': ["auto", "reserved_host", "auto_priority", "rh_priority"] }, 'service_type': parameter_types.name }, 'additionalProperties': False } }, 'required': ['segment'], 'additionalProperties': False } create = copy.deepcopy(_base) create['properties']['segment']['required'] = ['name', 'recovery_method', 'service_type'] create_v12 = copy.deepcopy(create) create_v12['properties']['segment']['properties']['enabled'] = \ parameter_types.boolean update = copy.deepcopy(_base) update['properties']['segment']['anyOf'] = [{'required': ['name']}, {'required': ['description']}, {'required': ['recovery_method']}, {'required': ['service_type']}, ] update_v12 = copy.deepcopy(update) update_v12['properties']['segment']['properties']['enabled'] = \ parameter_types.boolean update_v12['properties']['segment']['anyOf'].append({'required': ['enabled']}) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/api/openstack/ha/segments.py0000664000175100017510000001276615033036143022561 0ustar00mylesmyles# Copyright 2016 NTT DATA. # All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from http import HTTPStatus from webob import exc from masakari.api.openstack import common from masakari.api.openstack import extensions from masakari.api.openstack.ha.schemas import segments as schema from masakari.api.openstack import wsgi from masakari.api import validation from masakari import exception from masakari.ha import api as segment_api from masakari.policies import segments as segment_policies ALIAS = 'segments' class SegmentsController(wsgi.Controller): """Segments controller for the OpenStack API.""" def __init__(self): self.api = segment_api.FailoverSegmentAPI() @extensions.expected_errors((HTTPStatus.BAD_REQUEST, HTTPStatus.FORBIDDEN)) def index(self, req): """Returns a summary list of failover segments.""" context = req.environ['masakari.context'] context.can(segment_policies.SEGMENTS % 'index') try: limit, marker = common.get_limit_and_marker(req) sort_keys, sort_dirs = common.get_sort_params(req.params) filters = {} for field in ['recovery_method', 'service_type', 'enabled']: if field in req.params: filters[field] = req.params[field] segments = self.api.get_all(context, filters=filters, sort_keys=sort_keys, sort_dirs=sort_dirs, limit=limit, marker=marker) except exception.MarkerNotFound as e: raise exc.HTTPBadRequest(explanation=e.format_message()) except exception.Invalid as e: raise exc.HTTPBadRequest(explanation=e.format_message()) return {'segments': segments} @extensions.expected_errors((HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND)) def show(self, req, id): """Return data about the given segment id.""" context = req.environ['masakari.context'] context.can(segment_policies.SEGMENTS % 'detail') try: segment = self.api.get_segment(context, id) except exception.FailoverSegmentNotFound as e: raise exc.HTTPNotFound(explanation=e.format_message()) return {'segment': segment} @wsgi.response(HTTPStatus.CREATED) @extensions.expected_errors((HTTPStatus.FORBIDDEN, HTTPStatus.CONFLICT)) @validation.schema(schema.create, '1.0', '1.1') 
@validation.schema(schema.create_v12, '1.2') def create(self, req, body): """Creates a new failover segment.""" context = req.environ['masakari.context'] context.can(segment_policies.SEGMENTS % 'create') segment_data = body['segment'] try: segment = self.api.create_segment(context, segment_data) except exception.FailoverSegmentExists as e: raise exc.HTTPConflict(explanation=e.format_message()) return {'segment': segment} @extensions.expected_errors((HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND, HTTPStatus.CONFLICT)) @validation.schema(schema.update, '1.0', '1.1') @validation.schema(schema.update_v12, '1.2') def update(self, req, id, body): """Updates the existing segment.""" context = req.environ['masakari.context'] context.can(segment_policies.SEGMENTS % 'update') segment_data = body['segment'] try: segment = self.api.update_segment(context, id, segment_data) except exception.FailoverSegmentNotFound as e: raise exc.HTTPNotFound(explanation=e.format_message()) except (exception.FailoverSegmentExists, exception.Conflict) as e: raise exc.HTTPConflict(explanation=e.format_message()) return {'segment': segment} @wsgi.response(HTTPStatus.NO_CONTENT) @extensions.expected_errors((HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND, HTTPStatus.CONFLICT)) def delete(self, req, id): """Removes a segment by uuid.""" context = req.environ['masakari.context'] context.can(segment_policies.SEGMENTS % 'delete') try: self.api.delete_segment(context, id) except exception.FailoverSegmentNotFound as e: raise exc.HTTPNotFound(explanation=e.format_message()) except exception.Conflict as e: raise exc.HTTPConflict(explanation=e.format_message()) class Segments(extensions.V1APIExtensionBase): """Segments Extension.""" name = "Segments" alias = ALIAS version = 1 def get_resources(self): member_actions = {'action': 'POST'} resources = [ extensions.ResourceExtension(ALIAS, SegmentsController(), member_name='segment', member_actions=member_actions) ] return resources def get_controller_extensions(self): 
return [] ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/api/openstack/ha/versions.py0000664000175100017510000000443415033036143022575 0ustar00mylesmyles# Copyright (c) 2016 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from http import HTTPStatus from oslo_config import cfg from masakari.api import api_version_request from masakari.api.openstack.ha.views import versions as views_versions from masakari.api.openstack import wsgi CONF = cfg.CONF LINKS = { 'v1.0': { 'html': 'https://docs.openstack.org/' }, } VERSIONS = { "v1.0": { "id": "v1.0", "status": "CURRENT", "version": api_version_request._MAX_API_VERSION, "min_version": api_version_request._MIN_API_VERSION, "updated": "2016-07-01T11:33:21Z", "links": [ { "rel": "describedby", "type": "text/html", "href": LINKS['v1.0']['html'], }, ], "media-types": [ { "base": "application/json", "type": "application/vnd.openstack.masakari+json;version=1", } ], } } class Versions(wsgi.Resource): def __init__(self): super(Versions, self).__init__(None) def index(self, req, body=None): """Return all versions.""" builder = views_versions.get_view_builder(req) return builder.build_versions(VERSIONS) @wsgi.response(HTTPStatus.MULTIPLE_CHOICES) def multi(self, req, body=None): """Return multiple choices.""" builder = views_versions.get_view_builder(req) return builder.build_choices(VERSIONS, req) def get_action_args(self, request_environment): """Parse 
dictionary created by routes library.""" args = {} if request_environment['PATH_INFO'] == '/': args['action'] = 'index' else: args['action'] = 'multi' return args ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/api/openstack/ha/versionsV1.py0000664000175100017510000000376315033036143023010 0ustar00mylesmyles# Copyright (c) 2016 NTT Data # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from http import HTTPStatus import webob.exc from masakari.api.openstack import extensions from masakari.api.openstack.ha import versions from masakari.api.openstack.ha.views import versions as views_versions from masakari.api.openstack import wsgi ALIAS = "versions" class VersionsController(wsgi.Controller): @extensions.expected_errors(HTTPStatus.NOT_FOUND) def show(self, req, id='v1.0'): builder = views_versions.get_view_builder(req) if id not in versions.VERSIONS: raise webob.exc.HTTPNotFound() return builder.build_version(versions.VERSIONS[id]) class Versions(extensions.V1APIExtensionBase): """API Version information.""" name = "Versions" alias = ALIAS version = 1 def get_resources(self): resources = [ extensions.ResourceExtension(ALIAS, VersionsController(), custom_routes_fn=self.version_map)] return resources def get_controller_extensions(self): return [] def version_map(self, mapper, wsgi_resource): self.map_path(mapper, wsgi_resource, '/') self.map_path(mapper, wsgi_resource, '') @staticmethod 
def map_path(mapper, wsgi_resource, path): mapper.connect("versions", path, controller=wsgi_resource, action='show', conditions={"method": ['GET']}) ././@PaxHeader0000000000000000000000000000003200000000000010210 xustar0026 mtime=1751923813.52671 masakari-19.1.0.dev18/masakari/api/openstack/ha/views/0000775000175100017510000000000015033036146021506 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/api/openstack/ha/views/__init__.py0000664000175100017510000000000015033036143023602 0ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/api/openstack/ha/views/hosts.py0000664000175100017510000000366015033036143023222 0ustar00mylesmyles# Copyright (c) 2020 NTT Data # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from masakari.api.openstack import common def get_view_builder(req): base_url = req.application_url return ViewBuilder(base_url) class ViewBuilder(common.ViewBuilder): def __init__(self, base_url): """:param base_url: url of the root wsgi application.""" self.prefix = self._update_masakari_link_prefix(base_url) self.base_url = base_url def _host_details(self, host): return { 'id': host.id, 'uuid': host.uuid, 'name': host.name, 'failover_segment_id': host.failover_segment.uuid, 'failover_segment': host.failover_segment, 'type': host.type, 'reserved': host.reserved, 'control_attributes': host.control_attributes, 'on_maintenance': host.on_maintenance, 'created_at': host.created_at, 'updated_at': host.updated_at, 'deleted_at': host.deleted_at, 'deleted': host.deleted } def build_host(self, host): get_host_response = self._host_details(host) return get_host_response def build_hosts(self, hosts): host_objs = [] for host in hosts: get_host_response = self._host_details(host) host_objs.append(get_host_response) return dict(hosts=host_objs) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/api/openstack/ha/views/versions.py0000664000175100017510000000613715033036143023734 0ustar00mylesmyles# Copyright (c) 2016 NTT Data # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import copy from masakari.api.openstack import common def get_view_builder(req): base_url = req.application_url return ViewBuilder(base_url) class ViewBuilder(common.ViewBuilder): def __init__(self, base_url): """:param base_url: url of the root wsgi application.""" self.prefix = self._update_masakari_link_prefix(base_url) self.base_url = base_url def build_choices(self, VERSIONS, req): version_objs = [] for version in sorted(VERSIONS): version = VERSIONS[version] version_objs.append({ "id": version['id'], "status": version['status'], "links": [ { "rel": "self", "href": self.generate_href(version['id'], req.path), }, ], "media-types": version['media-types'], }) return dict(choices=version_objs) def build_versions(self, versions): version_objs = [] for version in sorted(versions.keys()): version = versions[version] version_objs.append({ "id": version['id'], "status": version['status'], "version": version['version'], "min_version": version['min_version'], "updated": version['updated'], "links": self._build_links(version), }) return dict(versions=version_objs) def build_version(self, version): reval = copy.deepcopy(version) reval['links'].insert(0, { "rel": "self", "href": self.prefix.rstrip('/') + '/', }) return dict(version=reval) def _build_links(self, version_data): """Generate a container of links that refer to the provided version.""" href = self.generate_href(version_data['id']) links = [ { "rel": "self", "href": href, }, ] return links def generate_href(self, version, path=None): """Create an url that refers to a specific version_number.""" # TODO(Dinesh_Bhor) When there will be increment in version then we can # define the default version_number according to the 'version' given # but currently the 'version_number' should be 'v1' by default. 
version_number = 'v1' path = path or '' return common.url_join(self.prefix, version_number, path) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/api/openstack/ha/vmoves.py0000664000175100017510000000742515033036143022247 0ustar00mylesmyles# Copyright(c) 2022 Inspur # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """The VM Move API extension.""" from http import HTTPStatus from webob import exc from masakari.api.openstack import common from masakari.api.openstack import extensions from masakari.api.openstack import wsgi from masakari import exception from masakari.ha import api as vmove_api from masakari.policies import vmoves as vmove_policies ALIAS = "vmoves" class VMovesController(wsgi.Controller): """The VM move API controller for the Instance HA.""" def __init__(self): self.api = vmove_api.VMoveAPI() @extensions.expected_errors((HTTPStatus.BAD_REQUEST, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND)) def index(self, req, notification_id): """Returns a list of vmoves.""" context = req.environ['masakari.context'] context.can(vmove_policies.VMOVES % 'index') try: filters = {} limit, marker = common.get_limit_and_marker(req) sort_keys, sort_dirs = common.get_sort_params(req.params) if 'status' in req.params: filters['status'] = req.params['status'] if 'type' in req.params: filters['type'] = req.params['type'] vmoves = self.api.get_all(context, notification_id, filters=filters, sort_keys=sort_keys, 
sort_dirs=sort_dirs, limit=limit, marker=marker) except exception.MarkerNotFound as ex: raise exc.HTTPBadRequest(explanation=ex.format_message()) except exception.Invalid as e: raise exc.HTTPBadRequest(explanation=e.format_message()) except exception.NotificationWithoutVMoves as e: raise exc.HTTPBadRequest(explanation=e.format_message()) except exception.NotificationNotFound as ex: raise exc.HTTPNotFound(explanation=ex.format_message()) return {'vmoves': vmoves} @extensions.expected_errors((HTTPStatus.BAD_REQUEST, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND)) def show(self, req, notification_id, id): """Shows the details of one vmove.""" context = req.environ['masakari.context'] context.can(vmove_policies.VMOVES % 'detail') try: vmove = self.api.get_vmove(context, notification_id, id) except exception.NotificationWithoutVMoves as e: raise exc.HTTPBadRequest(explanation=e.format_message()) except exception.VMoveNotFound as e: raise exc.HTTPNotFound(explanation=e.format_message()) return {'vmove': vmove} class VMoves(extensions.V1APIExtensionBase): """vmoves controller""" name = "vmoves" alias = ALIAS version = 1 def get_resources(self): parent = {'member_name': 'notification', 'collection_name': 'notifications'} resources = [ extensions.ResourceExtension( 'vmoves', VMovesController(), parent=parent, member_name='vmove')] return resources def get_controller_extensions(self): return [] ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/api/openstack/wsgi.py0000664000175100017510000011167415033036143021313 0ustar00mylesmyles# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import functools from http import HTTPStatus import inspect import microversion_parse from oslo_log import log as logging from oslo_serialization import jsonutils from oslo_utils import strutils import webob from masakari.api import api_version_request as api_version from masakari.api import versioned_method from masakari.api import wsgi from masakari import exception from masakari import i18n from masakari.i18n import _ LOG = logging.getLogger(__name__) _SUPPORTED_CONTENT_TYPES = ( 'application/json', 'application/vnd.openstack.masakari+json', ) _MEDIA_TYPE_MAP = { 'application/vnd.openstack.masakari+json': 'json', 'application/json': 'json', } # These are typically automatically created by routes as either defaults # collection or member methods. _ROUTES_METHODS = [ 'create', 'delete', 'show', 'update', ] _METHODS_WITH_BODY = [ 'POST', 'PUT', ] # name of attribute to keep version method information VER_METHOD_ATTR = 'versioned_methods' # Names of headers used by clients to request a specific version # of the REST API API_VERSION_REQUEST_HEADER = 'OpenStack-API-Version' def get_supported_content_types(): return _SUPPORTED_CONTENT_TYPES def get_media_map(): return dict(_MEDIA_TYPE_MAP.items()) # NOTE: This function allows a get on both a dict-like and an # object-like object. cache_db_items() is used on both versioned objects and # dicts, so the function can't be totally changed over to [] syntax, nor # can it be changed over to use getattr(). 
def item_get(item, item_key): if hasattr(item, '__getitem__'): return item[item_key] else: return getattr(item, item_key) class Request(wsgi.Request): """Add some OpenStack API-specific logic to the base webob.Request.""" def __init__(self, *args, **kwargs): super(Request, self).__init__(*args, **kwargs) self._extension_data = {'db_items': {}} if not hasattr(self, 'api_version_request'): self.api_version_request = api_version.APIVersionRequest() def cache_db_items(self, key, items, item_key='id'): """Allow API methods to store objects from a DB query to be used by API extensions within the same API request. An instance of this class only lives for the lifetime of a single API request, so there's no need to implement full cache management. """ db_items = self._extension_data['db_items'].setdefault(key, {}) for item in items: db_items[item_get(item, item_key)] = item def get_db_items(self, key): """Allow an API extension to get previously stored objects within the same API request. Note that the object data will be slightly stale. """ return self._extension_data['db_items'][key] def get_db_item(self, key, item_key): """Allow an API extension to get a previously stored object within the same API request. Note that the object data will be slightly stale. 
""" return self.get_db_items(key).get(item_key) def best_match_content_type(self): """Determine the requested response content-type.""" if 'masakari.best_content_type' not in self.environ: # Calculate the best MIME type content_type = None # Check URL path suffix parts = self.path.rsplit('.', 1) if len(parts) > 1: possible_type = 'application/' + parts[1] if possible_type in get_supported_content_types(): content_type = possible_type if not content_type: content_type = self.accept.best_match( get_supported_content_types()) self.environ['masakari.best_content_type'] = (content_type or 'application/json') return self.environ['masakari.best_content_type'] def get_content_type(self): """Determine content type of the request body. Does not do any body introspection, only checks header """ if "Content-Type" not in self.headers: return None content_type = self.content_type # NOTE: text/plain is the default for eventlet and # other webservers which use mimetools.Message.gettype() # whereas twisted defaults to ''. if not content_type or content_type == 'text/plain': return None if content_type not in get_supported_content_types(): raise exception.InvalidContentType(content_type=content_type) return content_type def best_match_language(self): """Determine the best available language for the request. :returns: the best language match or None if the 'Accept-Language' header was not available in the request. 
""" if not self.accept_language: return None return self.accept_language.best_match(i18n.get_available_languages()) def set_api_version_request(self): """Set API version request based on the request header information.""" hdr_string = microversion_parse.get_version( self.headers, service_type='instance-ha') if hdr_string is None: self.api_version_request = api_version.APIVersionRequest( api_version.DEFAULT_API_VERSION) elif hdr_string == 'latest': # 'latest' is a special keyword which is equivalent to # requesting the maximum version of the API supported self.api_version_request = api_version.max_api_version() else: self.api_version_request = api_version.APIVersionRequest( hdr_string) # Check that the version requested is within the global # minimum/maximum of supported API versions if not self.api_version_request.matches( api_version.min_api_version(), api_version.max_api_version()): raise exception.InvalidGlobalAPIVersion( req_ver=self.api_version_request.get_string(), min_ver=api_version.min_api_version().get_string(), max_ver=api_version.max_api_version().get_string()) class ActionDispatcher(object): """Maps method name to local methods through action name.""" def dispatch(self, *args, **kwargs): """Find and call local method.""" action = kwargs.pop('action', 'default') action_method = getattr(self, str(action), self.default) return action_method(*args, **kwargs) def default(self, data): raise NotImplementedError() class JSONDeserializer(ActionDispatcher): def _from_json(self, datastring): try: return jsonutils.loads(datastring) except ValueError: msg = _("cannot understand JSON") raise exception.MalformedRequestBody(reason=msg) def deserialize(self, datastring, action='default'): return self.dispatch(datastring, action=action) def default(self, datastring): return {'body': self._from_json(datastring)} class JSONDictSerializer(ActionDispatcher): """Default JSON request body serialization.""" def serialize(self, data, action='default'): return 
self.dispatch(data, action=action) def default(self, data): return str(jsonutils.dumps(data)) def response(code): """Attaches response code to a method. This decorator associates a response code with a method. Note that the function attributes are directly manipulated; the method is not wrapped. """ def decorator(func): func.wsgi_code = code return func return decorator class ResponseObject(object): """Bundles a response object Object that app methods may return in order to allow its response to be modified by extensions in the code. Its use is optional (and should only be used if you really know what you are doing). """ def __init__(self, obj, code=None, headers=None): """Builds a response object.""" self.obj = obj self._default_code = HTTPStatus.OK self._code = code self._headers = headers or {} self.serializer = JSONDictSerializer() def __getitem__(self, key): """Retrieves a header with the given name.""" return self._headers[key.lower()] def __setitem__(self, key, value): """Sets a header with the given name to the given value.""" self._headers[key.lower()] = value def __delitem__(self, key): """Deletes the header with the given name.""" del self._headers[key.lower()] def serialize(self, request, content_type): """Serializes the wrapped object. Utility method for serializing the wrapped object. Returns a webob.Response object. 
""" serializer = self.serializer body = None if self.obj is not None: body = serializer.serialize(self.obj) response = webob.Response(body=body) if response.headers.get('Content-Length'): response.headers['Content-Length'] = (str( response.headers['Content-Length'])) response.status_int = self.code for hdr, value in self._headers.items(): response.headers[hdr] = str(value) response.headers['Content-Type'] = str(content_type) return response @property def code(self): """Retrieve the response status.""" return self._code or self._default_code @property def headers(self): """Retrieve the headers.""" return self._headers.copy() def action_peek(body): """Determine action to invoke. This looks inside the json body and fetches out the action method name. """ try: decoded = jsonutils.loads(body) except ValueError: msg = _("cannot understand JSON") raise exception.MalformedRequestBody(reason=msg) # Make sure there's exactly one key... if len(decoded) != 1: msg = _("too many body keys") raise exception.MalformedRequestBody(reason=msg) # Return the action name return list(decoded.keys())[0] class ResourceExceptionHandler(object): """Context manager to handle Resource exceptions. Used when processing exceptions generated by API implementation methods (or their extensions). Converts most exceptions to Fault exceptions, with the appropriate logging. 
""" def __enter__(self): return None def __exit__(self, ex_type, ex_value, ex_traceback): if not ex_value: return True if isinstance(ex_value, exception.Forbidden): raise Fault(webob.exc.HTTPForbidden( explanation=ex_value.format_message())) elif isinstance(ex_value, exception.VersionNotFoundForAPIMethod): raise elif isinstance(ex_value, exception.Invalid): raise Fault(exception.ConvertedException( code=ex_value.code, explanation=ex_value.format_message())) elif isinstance(ex_value, TypeError): exc_info = (ex_type, ex_value, ex_traceback) LOG.error('Exception handling resource: %s', ex_value, exc_info=exc_info) raise Fault(webob.exc.HTTPBadRequest()) elif isinstance(ex_value, Fault): LOG.info("Fault thrown: %s", ex_value) raise ex_value elif isinstance(ex_value, webob.exc.HTTPException): LOG.info("HTTP exception thrown: %s", ex_value) raise Fault(ex_value) # We didn't handle the exception return False class Resource(wsgi.Application): """WSGI app that handles (de)serialization and controller dispatch. WSGI app that reads routing information supplied by RoutesMiddleware and calls the requested action method upon its controller. All controller action methods must accept a 'req' argument, which is the incoming wsgi.Request. If the operation is a PUT or POST, the controller method must also accept a 'body' argument (the deserialized request body). They may raise a webob.exc exception or return a dict, which will be serialized by requested content type. Exceptions derived from webob.exc.HTTPException will be automatically wrapped in Fault() to provide API friendly error responses. """ support_api_request_version = False def __init__(self, controller, inherits=None): """:param controller: object that implement methods created by routes lib :param inherits: another resource object that this resource should inherit extensions from. Any action extensions that are applied to the parent resource will also apply to this resource. 
""" self.controller = controller self.default_serializers = dict(json=JSONDictSerializer) # Copy over the actions dictionary self.wsgi_actions = {} if controller: self.register_actions(controller) # Save a mapping of extensions self.wsgi_extensions = {} self.wsgi_action_extensions = {} self.inherits = inherits def register_actions(self, controller): """Registers controller actions with this resource.""" actions = getattr(controller, 'wsgi_actions', {}) for key, method_name in actions.items(): self.wsgi_actions[key] = getattr(controller, method_name) def register_extensions(self, controller): """Registers controller extensions with this resource.""" extensions = getattr(controller, 'wsgi_extensions', []) for method_name, action_name in extensions: # Look up the extending method extension = getattr(controller, method_name) if action_name: # Extending an action... if action_name not in self.wsgi_action_extensions: self.wsgi_action_extensions[action_name] = [] self.wsgi_action_extensions[action_name].append(extension) else: # Extending a regular method if method_name not in self.wsgi_extensions: self.wsgi_extensions[method_name] = [] self.wsgi_extensions[method_name].append(extension) def get_action_args(self, request_environment): """Parse dictionary created by routes library.""" # NOTE: Check for get_action_args() override in the # controller if hasattr(self.controller, 'get_action_args'): return self.controller.get_action_args(request_environment) try: args = request_environment['wsgiorg.routing_args'][1].copy() except (KeyError, IndexError, AttributeError): return {} try: del args['controller'] except KeyError: pass try: del args['format'] except KeyError: pass return args def get_body(self, request): content_type = request.get_content_type() return content_type, request.body def deserialize(self, body): return JSONDeserializer().deserialize(body) def pre_process_extensions(self, extensions, request, action_args): # List of callables for post-processing extensions 
post = [] for ext in extensions: if inspect.isgeneratorfunction(ext): response = None # If it's a generator function, the part before the # yield is the preprocessing stage try: with ResourceExceptionHandler(): gen = ext(req=request, **action_args) response = next(gen) except Fault as ex: response = ex # We had a response... if response: return response, [] # No response, queue up generator for post-processing post.append(gen) else: # Regular functions only perform post-processing post.append(ext) # None is response, it means we keep going. We reverse the # extension list for post-processing. return None, reversed(post) def post_process_extensions(self, extensions, resp_obj, request, action_args): for ext in extensions: response = None if inspect.isgenerator(ext): # If it's a generator, run the second half of # processing try: with ResourceExceptionHandler(): response = ext.send(resp_obj) except StopIteration: # Normal exit of generator continue except Fault as ex: response = ex else: # Regular functions get post-processing... try: with ResourceExceptionHandler(): response = ext(req=request, resp_obj=resp_obj, **action_args) except exception.VersionNotFoundForAPIMethod: # If an attached extension (@wsgi.extends) for the # method has no version match its not an error. We # just don't run the extends code continue except Fault as ex: response = ex # We had a response... 
if response: return response return None def _should_have_body(self, request): return request.method in _METHODS_WITH_BODY @webob.dec.wsgify(RequestClass=Request) def __call__(self, request): """WSGI method that controls (de)serialization and method dispatch.""" if self.support_api_request_version: # Set the version of the API requested based on the header try: request.set_api_version_request() except exception.InvalidAPIVersionString as e: return Fault(webob.exc.HTTPBadRequest( explanation=e.format_message())) except exception.InvalidGlobalAPIVersion as e: return Fault(webob.exc.HTTPNotAcceptable( explanation=e.format_message())) # Identify the action, its arguments, and the requested # content type action_args = self.get_action_args(request.environ) action = action_args.pop('action', None) # NOTE: we filter out InvalidContentTypes early so we # know everything is good from here on out. try: content_type, body = self.get_body(request) accept = request.best_match_content_type() except exception.InvalidContentType: msg = _("Unsupported Content-Type") return Fault(webob.exc.HTTPUnsupportedMediaType(explanation=msg)) # NOTE: Splitting the function up this way allows for # auditing by external tools that wrap the existing # function. If we try to audit __call__(), we can # run into troubles due to the @webob.dec.wsgify() # decorator. 
return self._process_stack(request, action, action_args, content_type, body, accept) def _process_stack(self, request, action, action_args, content_type, body, accept): """Implement the processing stack.""" # Get the implementing method try: meth, extensions = self.get_method(request, action, content_type, body) except (AttributeError, TypeError): return Fault(webob.exc.HTTPNotFound()) except KeyError as ex: msg = _("There is no such action: %s") % ex.args[0] return Fault(webob.exc.HTTPBadRequest(explanation=msg)) except exception.MalformedRequestBody: msg = _("Malformed request body") return Fault(webob.exc.HTTPBadRequest(explanation=msg)) except webob.exc.HTTPMethodNotAllowed as e: return Fault(e) if body: msg = _("Action: '%(action)s', calling method: %(meth)s, body: " "%(body)s") % {'action': action, 'body': str(body, 'utf-8'), 'meth': str(meth)} LOG.debug(strutils.mask_password(msg)) else: LOG.debug("Calling method '%(meth)s'", {'meth': str(meth)}) # Now, deserialize the request body... 
try: contents = {} if self._should_have_body(request): # allow empty body with PUT and POST if request.content_length == 0: contents = {'body': None} else: contents = self.deserialize(body) except exception.MalformedRequestBody: msg = _("Malformed request body") return Fault(webob.exc.HTTPBadRequest(explanation=msg)) # Update the action args action_args.update(contents) project_id = action_args.pop("project_id", None) context = request.environ.get('masakari.context') if (context and project_id and (project_id != context.project_id)): msg = _("Malformed request URL: URL's project_id '%(project_id)s'" " doesn't match Context's project_id" " '%(context_project_id)s'") % { 'project_id': project_id, 'context_project_id': context.project_id } return Fault(webob.exc.HTTPBadRequest(explanation=msg)) # Run pre-processing extensions response, post = self.pre_process_extensions(extensions, request, action_args) if not response: try: with ResourceExceptionHandler(): action_result = self.dispatch(meth, request, action_args) except Fault as ex: response = ex if not response: # No exceptions; convert action_result into a # ResponseObject resp_obj = None if type(action_result) is dict or action_result is None: resp_obj = ResponseObject(action_result) elif isinstance(action_result, ResponseObject): resp_obj = action_result else: response = action_result # Run post-processing extensions if resp_obj: # Do a preserialize to set up the response object if hasattr(meth, 'wsgi_code'): resp_obj._default_code = meth.wsgi_code # Process post-processing extensions response = self.post_process_extensions(post, resp_obj, request, action_args) if resp_obj and not response: response = resp_obj.serialize(request, accept) if hasattr(response, 'headers'): for hdr, val in list(response.headers.items()): response.headers[hdr] = str(val) if not request.api_version_request.is_null(): response.headers[API_VERSION_REQUEST_HEADER] = \ 'masakari ' + request.api_version_request.get_string() 
response.headers.add('Vary', API_VERSION_REQUEST_HEADER) return response def get_method(self, request, action, content_type, body): meth, extensions = self._get_method(request, action, content_type, body) if self.inherits: _meth, parent_ext = self.inherits.get_method(request, action, content_type, body) extensions.extend(parent_ext) return meth, extensions def _get_method(self, request, action, content_type, body): """Look up the action-specific method and its extensions.""" # Look up the method try: if not self.controller: meth = getattr(self, action) else: meth = getattr(self.controller, action) except AttributeError: if (not self.wsgi_actions or action not in _ROUTES_METHODS + ['action']): if self.controller: msg = _("The request method: '%(method)s' with action: " "'%(action)s' is not allowed on this " "resource") % { 'method': request.method, 'action': action } raise webob.exc.HTTPMethodNotAllowed( explanation=msg, body_template='${explanation}') # Propagate the error raise else: return meth, self.wsgi_extensions.get(action, []) if action == 'action': action_name = action_peek(body) else: action_name = action # Look up the action method return (self.wsgi_actions[action_name], self.wsgi_action_extensions.get(action_name, [])) def dispatch(self, method, request, action_args): """Dispatch a call to the action-specific method.""" try: return method(req=request, **action_args) except exception.VersionNotFoundForAPIMethod: # We deliberately don't return any message information # about the exception to the user so it looks as if # the method is simply not implemented. return Fault(webob.exc.HTTPNotFound()) class ResourceV1(Resource): support_api_request_version = True def action(name): """Mark a function as an action. The given name will be taken as the action key in the body. This is also overloaded to allow extensions to provide non-extending definitions of create and delete operations. 
""" def decorator(func): func.wsgi_action = name return func return decorator def extends(*args, **kwargs): """Indicate a function extends an operation. Can be used as either:: @extends def index(...): pass or as:: @extends(action='resize') def _action_resize(...): pass """ def decorator(func): # Store enough information to find what we're extending func.wsgi_extends = (func.__name__, kwargs.get('action')) return func # If we have positional arguments, call the decorator if args: return decorator(*args) # OK, return the decorator instead return decorator class ControllerMetaclass(type): """Controller metaclass. This metaclass automates the task of assembling a dictionary mapping action keys to method names. """ def __new__(mcs, name, bases, cls_dict): """Adds the wsgi_actions dictionary to the class.""" # Find all actions actions = {} extensions = [] versioned_methods = None # start with wsgi actions from base classes for base in bases: actions.update(getattr(base, 'wsgi_actions', {})) if base.__name__ == "Controller": # NOTE: This resets the VER_METHOD_ATTR attribute # between API controller class creations. This allows us # to use a class decorator on the API methods that doesn't # require naming explicitly what method is being versioned as # it can be implicit based on the method decorated. It is a bit # ugly. 
if VER_METHOD_ATTR in base.__dict__: versioned_methods = getattr(base, VER_METHOD_ATTR) delattr(base, VER_METHOD_ATTR) for key, value in cls_dict.items(): if not callable(value): continue if getattr(value, 'wsgi_action', None): actions[value.wsgi_action] = key elif getattr(value, 'wsgi_extends', None): extensions.append(value.wsgi_extends) # Add the actions and extensions to the class dict cls_dict['wsgi_actions'] = actions cls_dict['wsgi_extensions'] = extensions if versioned_methods: cls_dict[VER_METHOD_ATTR] = versioned_methods return super(ControllerMetaclass, mcs).__new__(mcs, name, bases, cls_dict) class Controller(object, metaclass=ControllerMetaclass): """Default controller.""" _view_builder_class = None def __init__(self, view_builder=None): """Initialize controller with a view builder instance.""" if view_builder: self._view_builder = view_builder elif self._view_builder_class: self._view_builder = self._view_builder_class() else: self._view_builder = None def __getattribute__(self, key): def version_select(*args, **kwargs): """Look for the method which matches the name supplied and version constraints and calls it with the supplied arguments. @return: Returns the result of the method called @raises: VersionNotFoundForAPIMethod if there is no method which matches the name and version constraints """ # The first arg to all versioned methods is always the request # object. The version for the request is attached to the # request object if len(args) == 0: ver = kwargs['req'].api_version_request else: ver = args[0].api_version_request func_list = self.versioned_methods[key] for func in func_list: if ver.matches(func.start_version, func.end_version): # Update the version_select wrapper function so # other decorator attributes like wsgi.response # are still respected. 
functools.update_wrapper(version_select, func.func) return func.func(self, *args, **kwargs) # No version match raise exception.VersionNotFoundForAPIMethod(version=ver) try: version_meth_dict = object.__getattribute__(self, VER_METHOD_ATTR) except AttributeError: # No versioning on this class return object.__getattribute__(self, key) if version_meth_dict and key in object.__getattribute__( self, VER_METHOD_ATTR): return version_select return object.__getattribute__(self, key) # NOTE: This decorator MUST appear first (the outermost # decorator) on an API method for it to work correctly @classmethod def api_version(cls, min_ver, max_ver=None): """Decorator for versioning api methods. Add the decorator to any method which takes a request object as the first parameter and belongs to a class which inherits from wsgi.Controller. @min_ver: string representing minimum version @max_ver: optional string representing maximum version """ def decorator(f): obj_min_ver = api_version.APIVersionRequest(min_ver) if max_ver: obj_max_ver = api_version.APIVersionRequest(max_ver) else: obj_max_ver = api_version.APIVersionRequest() # Add to list of versioned methods registered func_name = f.__name__ new_func = versioned_method.VersionedMethod( func_name, obj_min_ver, obj_max_ver, f) func_dict = getattr(cls, VER_METHOD_ATTR, {}) if not func_dict: setattr(cls, VER_METHOD_ATTR, func_dict) func_list = func_dict.get(func_name, []) if not func_list: func_dict[func_name] = func_list func_list.append(new_func) # Ensure the list is sorted by minimum version (reversed) # so later when we work through the list in order we find # the method which has the latest version which supports # the version requested. 
is_intersect = Controller.check_for_versions_intersection( func_list) if is_intersect: raise exception.ApiVersionsIntersect( name=new_func.name, min_ver=new_func.start_version, max_ver=new_func.end_version, ) func_list.sort(key=lambda f: f.start_version, reverse=True) return f return decorator @staticmethod def is_valid_body(body, entity_name): if not (body and entity_name in body): return False def is_dict(d): try: d.get(None) return True except AttributeError: return False return is_dict(body[entity_name]) @staticmethod def check_for_versions_intersection(func_list): """Determines function list contains version intervals intersections. General algorithm: https://en.wikipedia.org/wiki/Intersection_algorithm :param func_list: list of VersionedMethod objects :return: boolean """ pairs = [] counter = 0 for f in func_list: pairs.append((f.start_version, 1, f)) pairs.append((f.end_version, -1, f)) def compare(x): return x[0] pairs.sort(key=compare) for p in pairs: counter += p[1] if counter > 1: return True return False class Fault(webob.exc.HTTPException): """Wrap webob.exc.HTTPException to provide API friendly response.""" _fault_names = { HTTPStatus.BAD_REQUEST: "badRequest", HTTPStatus.UNAUTHORIZED: "unauthorized", HTTPStatus.FORBIDDEN: "forbidden", HTTPStatus.NOT_FOUND: "itemNotFound", HTTPStatus.METHOD_NOT_ALLOWED: "badMethod", HTTPStatus.CONFLICT: "conflictingRequest", HTTPStatus.REQUEST_ENTITY_TOO_LARGE: "overLimit", HTTPStatus.UNSUPPORTED_MEDIA_TYPE: "badMediaType", HTTPStatus.NOT_IMPLEMENTED: "notImplemented", HTTPStatus.SERVICE_UNAVAILABLE: "serviceUnavailable", HTTPStatus.TOO_MANY_REQUESTS: "overLimit" } def __init__(self, exception): """Create a Fault for the given webob.exc.exception.""" self.wrapped_exc = exception for key, value in list(self.wrapped_exc.headers.items()): self.wrapped_exc.headers[key] = str(value) self.status_int = exception.status_int @webob.dec.wsgify(RequestClass=Request) def __call__(self, req): """Generate a WSGI response based on 
the exception passed to ctor.""" user_locale = req.best_match_language() # Replace the body with fault details. code = self.wrapped_exc.status_int fault_name = self._fault_names.get(code, "masakariFault") explanation = self.wrapped_exc.explanation LOG.debug("Returning %(code)s to user: %(explanation)s", {'code': code, 'explanation': explanation}) explanation = i18n.translate(explanation, user_locale) fault_data = { fault_name: { 'code': code, 'message': explanation}} if code == HTTPStatus.REQUEST_ENTITY_TOO_LARGE or code == 429: retry = self.wrapped_exc.headers.get('Retry-After', None) if retry: fault_data[fault_name]['retryAfter'] = retry if not req.api_version_request.is_null(): self.wrapped_exc.headers[API_VERSION_REQUEST_HEADER] = \ 'instance-ha ' + req.api_version_request.get_string() self.wrapped_exc.headers.add('Vary', API_VERSION_REQUEST_HEADER) self.wrapped_exc.content_type = 'application/json' self.wrapped_exc.charset = 'UTF-8' self.wrapped_exc.text = JSONDictSerializer().serialize(fault_data) return self.wrapped_exc def __str__(self): return self.wrapped_exc.__str__() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/api/urlmap.py0000664000175100017510000002374615033036143017655 0ustar00mylesmyles# Copyright (c) 2016 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import re from oslo_log import log as logging import paste.urlmap from urllib import request from masakari.api.openstack import wsgi LOG = logging.getLogger(__name__) _quoted_string_re = r'"[^"\\]*(?:\\.[^"\\]*)*"' _option_header_piece_re = re.compile( r';\s*([^\s;=]+|%s)\s*' r'(?:=\s*([^;]+|%s))?\s*' % (_quoted_string_re, _quoted_string_re)) def unquote_header_value(value): """Unquotes a header value. This does not use the real unquoting but what browsers are actually using for quoting. :param value: the header value to unquote. """ if value and value[0] == value[-1] == '"': # this is not the real unquoting, but fixing this so that the # RFC is met will result in bugs with internet explorer and # probably some other browsers as well. IE for example is # uploading files with "C:\foo\bar.txt" as filename value = value[1:-1] return value def parse_list_header(value): """Parse lists as described by RFC 2068 Section 2. In particular, parse comma-separated lists where the elements of the list may include quoted-strings. A quoted-string could contain a comma. A non-quoted string could have quotes in the middle. Quotes are removed automatically after parsing. The return value is a standard :class:`list`: >>> parse_list_header('token, "quoted value"') ['token', 'quoted value'] :param value: a string with a list header. :return: :class:`list` """ result = [] for item in request.parse_http_list(value): if item[:1] == item[-1:] == '"': item = unquote_header_value(item[1:-1]) result.append(item) return result def parse_options_header(value): """Parse a ``Content-Type`` like header into a tuple with the content type and the options: >>> parse_options_header('Content-Type: text/html; mimetype=text/html') ('Content-Type:', {'mimetype': 'text/html'}) :param value: the header to parse. 
:return: (str, options) """ def _tokenize(string): for match in _option_header_piece_re.finditer(string): key, value = match.groups() key = unquote_header_value(key) if value is not None: value = unquote_header_value(value) yield key, value if not value: return '', {} parts = _tokenize(';' + value) name = next(parts)[0] extra = dict(parts) return name, extra class Accept(object): def __init__(self, value): self._content_types = [parse_options_header(v) for v in parse_list_header(value)] def best_match(self, supported_content_types): best_quality = -1 best_content_type = None best_params = {} best_match = '*/*' for content_type in supported_content_types: for content_mask, params in self._content_types: try: quality = float(params.get('q', 1)) except ValueError: continue if quality < best_quality: continue elif best_quality == quality: if best_match.count('*') <= content_mask.count('*'): continue if self._match_mask(content_mask, content_type): best_quality = quality best_content_type = content_type best_params = params best_match = content_mask return best_content_type, best_params def _match_mask(self, mask, content_type): if '*' not in mask: return content_type == mask if mask == '*/*': return True mask_major = mask[:-2] content_type_major = content_type.split('/', 1)[0] return content_type_major == mask_major def urlmap_factory(loader, global_conf, **local_conf): if 'not_found_app' in local_conf: not_found_app = local_conf.pop('not_found_app') else: not_found_app = global_conf.get('not_found_app') if not_found_app: not_found_app = loader.get_app(not_found_app, global_conf=global_conf) urlmap = URLMap(not_found_app=not_found_app) for path, app_name in local_conf.items(): path = paste.urlmap.parse_path_expression(path) app = loader.get_app(app_name, global_conf=global_conf) urlmap[path] = app return urlmap class URLMap(paste.urlmap.URLMap): def _match(self, host, port, path_info): """Find longest match for a given URL path.""" for (domain, app_url), app in 
self.applications: if domain and domain != host and domain != host + ':' + port: continue if (path_info == app_url or path_info.startswith(app_url + '/')): return app, app_url return None, None def _set_script_name(self, app, app_url): def wrap(environ, start_response): environ['SCRIPT_NAME'] += app_url return app(environ, start_response) return wrap def _munge_path(self, app, path_info, app_url): def wrap(environ, start_response): environ['SCRIPT_NAME'] += app_url environ['PATH_INFO'] = path_info[len(app_url):] return app(environ, start_response) return wrap def _path_strategy(self, host, port, path_info): """Check path suffix for MIME type and path prefix for API version.""" mime_type = app = app_url = None parts = path_info.rsplit('.', 1) if len(parts) > 1: possible_type = 'application/' + parts[1] if possible_type in wsgi.get_supported_content_types(): mime_type = possible_type parts = path_info.split('/') if len(parts) > 1: possible_app, possible_app_url = self._match(host, port, path_info) # Don't use prefix if it ends up matching default if possible_app and possible_app_url: app_url = possible_app_url app = self._munge_path(possible_app, path_info, app_url) return mime_type, app, app_url def _content_type_strategy(self, host, port, environ): """Check Content-Type header for API version.""" app = None params = parse_options_header(environ.get('CONTENT_TYPE', ''))[1] if 'version' in params: app, app_url = self._match(host, port, '/v' + params['version']) if app: app = self._set_script_name(app, app_url) return app def _accept_strategy(self, host, port, environ, supported_content_types): """Check Accept header for best matching MIME type and API version.""" accept = Accept(environ.get('HTTP_ACCEPT', '')) app = None # Find the best match in the Accept header mime_type, params = accept.best_match(supported_content_types) if 'version' in params: app, app_url = self._match(host, port, '/v' + params['version']) if app: app = self._set_script_name(app, app_url) 
return mime_type, app def __call__(self, environ, start_response): host = environ.get('HTTP_HOST', environ.get('SERVER_NAME')).lower() if ':' in host: host, port = host.split(':', 1) else: if environ['wsgi.url_scheme'] == 'http': port = '80' else: port = '443' path_info = environ['PATH_INFO'] path_info = self.normalize_url(path_info, False)[1] # The MIME type for the response is determined in one of two ways: # 1) URL path suffix (eg /servers/detail.json) # 2) Accept header (eg application/json;q=0.8, application/xml;q=0.2) # The API version is determined in one of three ways: # 1) URL path prefix (eg /v1.1/tenant/servers/detail) # 2) Content-Type header (eg application/json;version=1.1) # 3) Accept header (eg application/json;q=0.8;version=1.1) supported_content_types = list(wsgi.get_supported_content_types()) mime_type, app, app_url = self._path_strategy(host, port, path_info) # Accept application/atom+xml for the index query of each API # version mount point as well as the root index if (app_url and app_url + '/' == path_info) or path_info == '/': supported_content_types.append('application/atom+xml') if not app: app = self._content_type_strategy(host, port, environ) if not mime_type or not app: possible_mime_type, possible_app = self._accept_strategy( host, port, environ, supported_content_types) if possible_mime_type and not mime_type: mime_type = possible_mime_type if possible_app and not app: app = possible_app if not mime_type: mime_type = 'application/json' if not app: # Didn't match a particular version, probably matches default app, app_url = self._match(host, port, path_info) if app: app = self._munge_path(app, path_info, app_url) if app: environ['masakari.best_content_type'] = mime_type return app(environ, start_response) LOG.debug('Could not find application for %s', environ['PATH_INFO']) environ['paste.urlmap_object'] = self return self.not_found_application(environ, start_response) ././@PaxHeader0000000000000000000000000000002600000000000010213 
xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/api/utils.py0000664000175100017510000001062215033036143017502 0ustar00mylesmyles# Copyright (c) 2018 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import socket from masakari.notifications.objects import base as notification_base from masakari.notifications.objects import exception as notification_exception from masakari.notifications.objects import notification as event_notification from masakari.objects import fields def _get_fault_and_priority_from_exc_and_tb(exception, tb): fault = None priority = fields.EventNotificationPriority.INFO if exception: priority = fields.EventNotificationPriority.ERROR fault = notification_exception.ExceptionPayload.from_exc_and_traceback( exception, tb) return fault, priority def notify_about_segment_api(context, segment, action, phase=None, binary='masakari-api', exception=None, tb=None): """Send versioned notification about a segment API. 
:param segment: FailoverSegment object :param action: the name of the action :param phase: the phase of the action :param binary: the binary emitting the notification :param exception: the thrown exception (used in error notifications) :param tb: the traceback (used in error notifications) """ fault, priority = _get_fault_and_priority_from_exc_and_tb(exception, tb) payload = event_notification.SegmentApiPayload( segment=segment, fault=fault) api_notification = event_notification.SegmentApiNotification( context=context, priority=priority, publisher=notification_base.NotificationPublisher( context=context, host=socket.gethostname(), binary=binary), event_type=notification_base.EventType( action=action, phase=phase), payload=payload) api_notification.emit(context) def notify_about_host_api(context, host, action, phase=None, binary='masakari-api', exception=None, tb=None): """Send versioned notification about a host API. :param host: Host object :param action: the name of the action :param phase: the phase of the action :param binary: the binary emitting the notification :param exception: the thrown exception (used in error notifications) :param tb: the traceback (used in error notifications) """ fault, priority = _get_fault_and_priority_from_exc_and_tb(exception, tb) payload = event_notification.HostApiPayload(host=host, fault=fault) api_notification = event_notification.HostApiNotification( context=context, priority=priority, publisher=notification_base.NotificationPublisher( context=context, host=socket.gethostname(), binary=binary), event_type=notification_base.EventType( action=action, phase=phase), payload=payload) api_notification.emit(context) def notify_about_notification_api(context, notification, action, phase=None, binary='masakari-api', exception=None, tb=None): """Send versioned notification about a notification api. 
:param notification: Notification object :param action: the name of the action :param phase: the phase of the action :param binary: the binary emitting the notification :param exception: the thrown exception (used in error notifications) :param tb: the traceback (used in error notifications) """ fault, priority = _get_fault_and_priority_from_exc_and_tb(exception, tb) payload = event_notification.NotificationApiPayload( notification=notification, fault=fault) api_notification = event_notification.NotificationApiNotification( context=context, priority=priority, publisher=notification_base.NotificationPublisher( context=context, host=socket.gethostname(), binary=binary), event_type=notification_base.EventType( action=action, phase=phase), payload=payload) api_notification.emit(context) ././@PaxHeader0000000000000000000000000000003200000000000010210 xustar0026 mtime=1751923813.52671 masakari-19.1.0.dev18/masakari/api/validation/0000775000175100017510000000000015033036146020124 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/api/validation/__init__.py0000664000175100017510000000371215033036143022235 0ustar00mylesmyles# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Request Body validating middleware. 
""" import functools from masakari.api import api_version_request as api_version from masakari.api.validation.validators import _SchemaValidator def schema(request_body_schema, min_version=None, max_version=None): """Register a schema to validate request body. Registered schema will be used for validating request body just before API method executing. :argument dict request_body_schema: a schema to validate request body """ def add_validator(func): @functools.wraps(func) def wrapper(*args, **kwargs): min_ver = api_version.APIVersionRequest(min_version) max_ver = api_version.APIVersionRequest(max_version) if 'req' in kwargs: ver = kwargs['req'].api_version_request else: ver = args[1].api_version_request # Only validate against the schema if it lies within # the version range specified. Note that, if both min # and max are not specified, the validator will always # be run. if ver.matches(min_ver, max_ver): schema_validator = _SchemaValidator(request_body_schema) schema_validator.validate(kwargs['body']) return func(*args, **kwargs) return wrapper return add_validator ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/api/validation/parameter_types.py0000664000175100017510000001222315033036143023677 0ustar00mylesmyles# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Common parameter types for validating request Body. 
""" import re import unicodedata from masakari.i18n import _ class ValidationRegex(object): def __init__(self, regex, reason): self.regex = regex self.reason = reason def _is_printable(char): """determine if a unicode code point is printable. This checks if the character is either "other" (mostly control codes), or a non-horizontal space. All characters that don't match those criteria are considered printable; that is: letters; combining marks; numbers; punctuation; symbols; (horizontal) space separators. """ category = unicodedata.category(char) return (not category.startswith("C") and (not category.startswith("Z") or category == "Zs")) def _get_all_chars(): for i in range(0xFFFF): yield chr(i) # build a regex that matches all printable characters. This allows # spaces in the middle of the name. Also note that the regexp below # deliberately allows the empty string. This is so only the constraint # which enforces a minimum length for the name is triggered when an # empty string is tested. Otherwise it is not deterministic which # constraint fails and this causes issues for some unittests when # PYTHONHASHSEED is set randomly. def _build_regex_range(ws=True, invert=False, exclude=None): """Build a range regex for a set of characters in utf8. This builds a valid range regex for characters in utf8 by iterating the entire space and building up a set of x-y ranges for all the characters we find which are valid. :param ws: should we include whitespace in this range. :param exclude: any characters we want to exclude :param invert: invert the logic The inversion is useful when we want to generate a set of ranges which is everything that's not a certain class. For instance, produce all all the non printable characters as a set of ranges. 
""" if exclude is None: exclude = [] regex = "" # are we currently in a range in_range = False # last character we found, for closing ranges last = None # last character we added to the regex, this lets us know that we # already have B in the range, which means we don't need to close # it out with B-B. While the later seems to work, it's kind of bad form. last_added = None def valid_char(char): if char in exclude: result = False elif ws: result = _is_printable(char) else: # Zs is the unicode class for space characters, of which # there are about 10 in this range. result = (_is_printable(char) and unicodedata.category(char) != "Zs") if invert is True: return not result return result # iterate through the entire character range. in_ for c in _get_all_chars(): if valid_char(c): if not in_range: regex += re.escape(c) last_added = c in_range = True else: if in_range and last != last_added: regex += "-" + re.escape(last) in_range = False last = c else: if in_range: regex += "-" + re.escape(c) return regex valid_description_regex_base = '^[\n%s]*$' valid_name_regex_base = '^(?![%s])[%s]*(? 0: detail = _("Invalid input for field/attribute %(path)s." " Value: %(value)s. %(message)s") % { 'path': ex.path.pop(), 'value': ex.instance, 'message': ex.message } else: detail = ex.message raise exception.ValidationError(detail=detail) except TypeError as ex: # NOTE: If passing non string value to patternProperties parameter, # TypeError happens. Here is for catching the TypeError. 
detail = str(ex) raise exception.ValidationError(detail=detail) def _number_from_str(self, instance): try: value = int(instance) except (ValueError, TypeError): try: value = float(instance) except (ValueError, TypeError): return None return value def _validate_minimum(self, validator, minimum, instance, schema): instance = self._number_from_str(instance) if instance is None: return return self.validator_org.VALIDATORS['minimum'](validator, minimum, instance, schema) def _validate_maximum(self, validator, maximum, instance, schema): instance = self._number_from_str(instance) if instance is None: return return self.validator_org.VALIDATORS['maximum'](validator, maximum, instance, schema) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/api/versioned_method.py0000664000175100017510000000234415033036143021702 0ustar00mylesmyles# Copyright 2016 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
class VersionedMethod(object): def __init__(self, name, start_version, end_version, func): """Versioning information for a single method @name: Name of the method @start_version: Minimum acceptable version @end_version: Maximum acceptable_version @func: Method to call Minimum and maximums are inclusive """ self.name = name self.start_version = start_version self.end_version = end_version self.func = func def __str__(self): return ("Version Method %s: min: %s, max: %s" % (self.name, self.start_version, self.end_version)) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/api/wsgi.py0000664000175100017510000004071015033036143017314 0ustar00mylesmyles# Copyright (c) 2016 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
"""Utility methods for working with WSGI servers.""" import os.path import socket import ssl import sys import eventlet import eventlet.wsgi import greenlet from oslo_log import log as logging from oslo_service import service from oslo_utils import excutils from paste import deploy import routes.middleware import webob.dec import webob.exc import masakari.conf from masakari import exception from masakari.i18n import _ from masakari import utils CONF = masakari.conf.CONF LOG = logging.getLogger(__name__) class Server(service.ServiceBase): """Server class to manage a WSGI server, serving a WSGI application.""" default_pool_size = CONF.wsgi.default_pool_size def __init__(self, name, app, host='0.0.0.0', port=0, pool_size=None, protocol=eventlet.wsgi.HttpProtocol, backlog=128, use_ssl=False, max_url_len=None): """Initialize, but do not start, a WSGI server. :param name: Pretty name for logging. :param app: The WSGI application to serve. :param host: IP address to serve the application. :param port: Port number to server the application. :param pool_size: Maximum number of eventlets to spawn concurrently. :param backlog: Maximum number of queued connections. :param max_url_len: Maximum length of permitted URLs. :returns: None :raises: masakari.exception.InvalidInput """ # Allow operators to customize http requests max header line size. 
eventlet.wsgi.MAX_HEADER_LINE = CONF.wsgi.max_header_line self.name = name self.app = app self._server = None self._protocol = protocol self.pool_size = pool_size or self.default_pool_size self._pool = eventlet.GreenPool(self.pool_size) self._logger = logging.getLogger("masakari.%s.wsgi.server" % self.name) self._use_ssl = use_ssl self._max_url_len = max_url_len self.client_socket_timeout = CONF.wsgi.client_socket_timeout or None if backlog < 1: raise exception.InvalidInput( reason=_('The backlog must be more than 0')) bind_addr = (host, port) try: info = socket.getaddrinfo(bind_addr[0], bind_addr[1], socket.AF_UNSPEC, socket.SOCK_STREAM)[0] family = info[0] bind_addr = info[-1] except Exception: family = socket.AF_INET try: self._socket = eventlet.listen(bind_addr, family, backlog=backlog) except EnvironmentError: LOG.error("Could not bind to %(host)s:%(port)d", {'host': host, 'port': port}) raise (self.host, self.port) = self._socket.getsockname()[0:2] LOG.info("%(name)s listening on %(host)s:%(port)d", {'name': self.name, 'host': self.host, 'port': self.port}) def start(self): """Start serving a WSGI application. :returns: None """ # The server socket object will be closed after server exits, # but the underlying file descriptor will remain open, and will # give bad file descriptor error. So duplicating the socket object, # to keep file descriptor usable. 
dup_socket = self._socket.dup() dup_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) # sockets can hang around forever without keepalive dup_socket.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) # This option isn't available in the OS X version of eventlet if hasattr(socket, 'TCP_KEEPIDLE'): dup_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, CONF.wsgi.tcp_keepidle) if self._use_ssl: try: ca_file = CONF.wsgi.ssl_ca_file cert_file = CONF.wsgi.ssl_cert_file key_file = CONF.wsgi.ssl_key_file if cert_file and not os.path.exists(cert_file): raise RuntimeError( _("Unable to find cert_file : %s") % cert_file) if ca_file and not os.path.exists(ca_file): raise RuntimeError( _("Unable to find ca_file : %s") % ca_file) if key_file and not os.path.exists(key_file): raise RuntimeError( _("Unable to find key_file : %s") % key_file) if self._use_ssl and (not cert_file or not key_file): raise RuntimeError( _("When running server in SSL mode, you must " "specify both a cert_file and key_file " "option value in your configuration file")) ssl_kwargs = { 'server_side': True, 'certfile': cert_file, 'keyfile': key_file, 'cert_reqs': ssl.CERT_NONE, } if CONF.wsgi.ssl_ca_file: ssl_kwargs['ca_certs'] = ca_file ssl_kwargs['cert_reqs'] = ssl.CERT_REQUIRED dup_socket = eventlet.wrap_ssl(dup_socket, **ssl_kwargs) except Exception: with excutils.save_and_reraise_exception(): LOG.error("Failed to start %(name)s on %(host)s" ":%(port)d with SSL support", {'name': self.name, 'host': self.host, 'port': self.port}) wsgi_kwargs = { 'func': eventlet.wsgi.server, 'sock': dup_socket, 'site': self.app, 'protocol': self._protocol, 'custom_pool': self._pool, 'log': self._logger, 'log_format': CONF.wsgi.wsgi_log_format, 'debug': False, 'keepalive': CONF.wsgi.keep_alive, 'socket_timeout': self.client_socket_timeout } if self._max_url_len: wsgi_kwargs['url_length_limit'] = self._max_url_len self._server = utils.spawn(**wsgi_kwargs) def reset(self): """Reset server greenpool size 
to default. :returns: None """ self._pool.resize(self.pool_size) def stop(self): """Stop this server. This is not a very nice action, as currently the method by which a server is stopped is by killing its eventlet. :returns: None """ LOG.info("Stopping WSGI server.") if self._server is not None: # Resize pool to stop new requests from being processed self._pool.resize(0) self._server.kill() def wait(self): """Block, until the server has stopped. Waits on the server's eventlet to finish, then returns. :returns: None """ try: if self._server is not None: self._pool.waitall() self._server.wait() except greenlet.GreenletExit: LOG.info("WSGI server has stopped.") class Request(webob.Request): def __init__(self, environ, *args, **kwargs): if CONF.wsgi.secure_proxy_ssl_header: scheme = environ.get(CONF.wsgi.secure_proxy_ssl_header) if scheme: environ['wsgi.url_scheme'] = scheme super(Request, self).__init__(environ, *args, **kwargs) class Application(object): """Base WSGI application wrapper. Subclasses need to implement __call__.""" @classmethod def factory(cls, global_config, **local_config): """Used for paste app factories in paste.deploy config files. Any local configuration (that is, values under the [app:APPNAME] section of the paste config) will be passed into the `__init__` method as kwargs. A hypothetical configuration would look like: [app:wadl] latest_version = 1.3 paste.app_factory = masakari.api.fancy_api:Wadl.factory which would result in a call to the `Wadl` class as import masakari.api.fancy_api fancy_api.Wadl(latest_version='1.3') You could of course re-implement the `factory` method in subclasses, but using the kwarg passing it shouldn't be necessary. 
""" return cls(**local_config) def __call__(self, environ, start_response): r"""Subclasses will probably want to implement __call__ like this: @webob.dec.wsgify(RequestClass=Request) def __call__(self, req): # Any of the following objects work as responses: # Option 1: simple string res = 'message\n' # Option 2: a nicely formatted HTTP exception page res = exc.HTTPForbidden(explanation='Nice try') # Option 3: a webob Response object (in case you need to play with # headers, or you want to be treated like an iterable, or ...) res = Response() res.app_iter = open('somefile') # Option 4: any wsgi app to be run next res = self.application # Option 5: you can get a Response object for a wsgi app, too, to # play with headers etc res = req.get_response(self.application) # You can then just return your response... return res # ... or set req.response and return None. req.response = res See the end of http://pythonpaste.org/webob/modules/dec.html for more info. """ raise NotImplementedError(_('You must implement __call__')) class Middleware(Application): """Base WSGI middleware. These classes require an application to be initialized that will be called next. By default the middleware will simply call its wrapped app, or you can override __call__ to customize its behavior. """ @classmethod def factory(cls, global_config, **local_config): """Used for paste app factories in paste.deploy config files. Any local configuration (that is, values under the [filter:APPNAME] section of the paste config) will be passed into the `__init__` method as kwargs. A hypothetical configuration would look like: [filter:analytics] redis_host = 127.0.0.1 paste.filter_factory = masakari.api.analytics:Analytics.factory which would result in a call to the `Analytics` class as import masakari.api.analytics analytics.Analytics(app_from_paste, redis_host='127.0.0.1') You could of course re-implement the `factory` method in subclasses, but using the kwarg passing it shouldn't be necessary. 
""" def _factory(app): return cls(app, **local_config) return _factory def __init__(self, application): self.application = application def process_request(self, req): """Called on each request. If this returns None, the next application down the stack will be executed. If it returns a response then that response will be returned and execution will stop here. """ return None def process_response(self, response): """Do whatever you'd like to the response.""" return response @webob.dec.wsgify(RequestClass=Request) def __call__(self, req): response = self.process_request(req) if response: return response response = req.get_response(self.application) return self.process_response(response) class Debug(Middleware): """Helper class for debugging a WSGI application. Can be inserted into any WSGI application chain to get information about the request and response. """ @webob.dec.wsgify(RequestClass=Request) def __call__(self, req): print(('*' * 40) + ' REQUEST ENVIRON') for key, value in req.environ.items(): print(key, '=', value) print() resp = req.get_response(self.application) print(('*' * 40) + ' RESPONSE HEADERS') for (key, value) in resp.headers.items(): print(key, '=', value) print() resp.app_iter = self.print_generator(resp.app_iter) return resp @staticmethod def print_generator(app_iter): """Iterator that prints the contents of a wrapper string.""" print(('*' * 40) + ' BODY') for part in app_iter: sys.stdout.write(part) sys.stdout.flush() yield part print() class Router(object): """WSGI middleware that maps incoming requests to WSGI apps.""" def __init__(self, mapper): """Create a router for the given routes.Mapper. Each route in `mapper` must specify a 'controller', which is a WSGI app to call. You'll probably want to specify an 'action' as well and have your controller be an object that can route the request to the action-specific method. 
Examples: mapper = routes.Mapper() sc = ServerController() # Explicit mapping of one route to a controller+action mapper.connect(None, '/svrlist', controller=sc, action='list') # Actions are all implicitly defined mapper.resource('server', 'servers', controller=sc) # Pointing to an arbitrary WSGI app. You can specify the # {path_info:.*} parameter so the target app can be handed just that # section of the URL. mapper.connect(None, '/v1.0/{path_info:.*}', controller=BlogApp()) """ self.map = mapper self._router = routes.middleware.RoutesMiddleware(self._dispatch, self.map) @webob.dec.wsgify(RequestClass=Request) def __call__(self, req): """Route the incoming request to a controller based on self.map. If no match, return a 404. """ return self._router @staticmethod @webob.dec.wsgify(RequestClass=Request) def _dispatch(req): """Dispatch the request to the appropriate controller. Called by self._router after matching the incoming request to a route and putting the information into req.environ. Either returns 404 or the routed WSGI app's response. """ match = req.environ['wsgiorg.routing_args'][1] if not match: return webob.exc.HTTPNotFound() app = match['controller'] return app class Loader(object): """Used to load WSGI applications from paste configurations.""" def __init__(self, config_path=None): """Initialize the loader, and attempt to find the config. :param config_path: Full or relative path to the paste config. :returns: None """ self.config_path = None config_path = config_path or CONF.wsgi.api_paste_config if not os.path.isabs(config_path): self.config_path = CONF.find_file(config_path) elif os.path.exists(config_path): self.config_path = config_path if not self.config_path: raise exception.ConfigNotFound(path=config_path) def load_app(self, name): """Return the paste URLMap wrapped WSGI application. :param name: Name of the application to load. :returns: Paste URLMap object wrapping the requested application. 
:raises: `masakari.exception.PasteAppNotFound` """ try: LOG.debug("Loading app %(name)s from %(path)s", {'name': name, 'path': self.config_path}) return deploy.loadapp("config:%s" % self.config_path, name=name) except LookupError: LOG.exception("Couldn't lookup app: %s", name) raise exception.PasteAppNotFound(name=name, path=self.config_path) ././@PaxHeader0000000000000000000000000000003200000000000010210 xustar0026 mtime=1751923813.52671 masakari-19.1.0.dev18/masakari/cmd/0000775000175100017510000000000015033036146015764 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/cmd/__init__.py0000664000175100017510000000120215033036143020065 0ustar00mylesmyles# Copyright (c) 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import eventlet eventlet.monkey_patch() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/cmd/api.py0000664000175100017510000000566015033036143017113 0ustar00mylesmyles# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Starter script for Masakari API. """ import os import sys from oslo_log import log as logging from oslo_service import _options as service_opts from paste import deploy from masakari.common import config import masakari.conf from masakari import config as api_config from masakari import coordination from masakari import exception from masakari import objects from masakari import rpc from masakari import service from masakari import version CONFIG_FILES = ['api-paste.ini', 'masakari.conf'] CONF = masakari.conf.CONF def _get_config_files(env=None): if env is None: env = os.environ dirname = env.get('OS_MASAKARI_CONFIG_DIR', '/etc/masakari').strip() return [os.path.join(dirname, config_file) for config_file in CONFIG_FILES] def main(): api_config.parse_args(sys.argv) logging.setup(CONF, "masakari") log = logging.getLogger(__name__) objects.register_all() launcher = service.process_launcher() try: use_coordination = bool(CONF.coordination.backend_url) server = service.WSGIService("masakari_api", use_ssl=CONF.use_ssl, coordination=use_coordination) launcher.launch_service(server, workers=server.workers or 1) except exception.PasteAppNotFound as ex: log.error("Failed to start ``masakari_api`` service. 
Error: %s", str(ex)) sys.exit(1) launcher.wait() def initialize_application(): conf_files = _get_config_files() api_config.parse_args([], default_config_files=conf_files) logging.setup(CONF, "masakari") objects.register_all() CONF(sys.argv[1:], project='masakari', version=version.version_string()) # NOTE: Dump conf at debug (log_options option comes from oslo.service) # This is gross but we don't have a public hook into oslo.service to # register these options, so we are doing it manually for now; # remove this when we have a hook method into oslo.service. CONF.register_opts(service_opts.service_opts) if CONF.log_options: CONF.log_opt_values(logging.getLogger(__name__), logging.DEBUG) config.set_middleware_defaults() if CONF.coordination.backend_url: coordination.COORDINATOR.start() rpc.init(CONF) conf = conf_files[0] return deploy.loadapp('config:%s' % conf, name="masakari_api") ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/cmd/engine.py0000664000175100017510000000223615033036143017603 0ustar00mylesmyles# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
"""Starter script for Masakari Engine.""" import sys from oslo_log import log as logging import masakari.conf from masakari import config from masakari import objects from masakari import service from masakari import utils CONF = masakari.conf.CONF def main(): config.parse_args(sys.argv) logging.setup(CONF, "masakari") utils.monkey_patch() objects.register_all() server = service.Service.create(binary='masakari-engine', topic=CONF.masakari_topic) service.serve(server) service.wait() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/cmd/manage.py0000664000175100017510000001446415033036143017574 0ustar00mylesmyles#!/usr/bin/env python3 # Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ CLI interface for masakari management. 
""" import logging as python_logging import sys import time from oslo_config import cfg from oslo_log import log as logging import masakari.conf from masakari import context from masakari import db from masakari.db import api as db_api from masakari.db.sqlalchemy import migration as db_migration from masakari import exception from masakari.i18n import _ from masakari import utils from masakari import version CONF = masakari.conf.CONF logging.register_options(CONF) # Decorators for actions def args(*args, **kwargs): def _decorator(func): func.__dict__.setdefault('args', []).insert(0, (args, kwargs)) return func return _decorator def _db_error(caught_exception): print('%s' % caught_exception) print(_("The above error may show that the database has not " "been created.\nPlease create a database using " "'masakari-manage db sync' before running this command.")) sys.exit(1) class DbCommands(object): """Class for managing the database.""" def __init__(self): pass @args('version', nargs='?', default=None, type=int, help='Database version') def sync(self, version=None): """Sync the database up to the most recent version.""" try: return db_migration.db_sync(version) except exception.InvalidInput as ex: print(ex) sys.exit(1) def version(self): """Print the current database version.""" print(db_migration.db_version()) @args('--age_in_days', type=int, default=30, help='Purge deleted rows older than age in days (default: ' '%(default)d)') @args('--max_rows', type=int, default=-1, help='Limit number of records to delete (default: %(default)d)') def purge(self, age_in_days, max_rows): """Purge rows older than a given age from masakari tables.""" try: max_rows = utils.validate_integer( max_rows, 'max_rows', -1, db.MAX_INT) except exception.Invalid as exc: sys.exit(str(exc)) try: age_in_days = int(age_in_days) except ValueError: msg = 'Invalid value for age, %(age)s' % {'age': age_in_days} sys.exit(str(msg)) if max_rows == 0: sys.exit(_("Must supply value greater than 0 for 
max_rows.")) if age_in_days < 0: sys.exit(_("Must supply a non-negative value for age.")) if age_in_days >= (int(time.time()) / 86400): sys.exit(_("Maximal age is count of days since epoch.")) ctx = context.get_admin_context() db_api.purge_deleted_rows(ctx, age_in_days, max_rows) CATEGORIES = { 'db': DbCommands, } def methods_of(obj): """Return non-private methods from an object. Get all callable methods of an object that don't start with underscore :return: a list of tuples of the form (method_name, method) """ result = [] for i in dir(obj): if callable(getattr(obj, i)) and not i.startswith('_'): result.append((i, getattr(obj, i))) return result def add_command_parsers(subparsers): for category in CATEGORIES: command_object = CATEGORIES[category]() parser = subparsers.add_parser(category) parser.set_defaults(command_object=command_object) category_subparsers = parser.add_subparsers(dest='action') for (action, action_fn) in methods_of(command_object): parser = category_subparsers.add_parser(action) action_kwargs = [] for args, kwargs in getattr(action_fn, 'args', []): parser.add_argument(*args, **kwargs) parser.set_defaults(action_fn=action_fn) parser.set_defaults(action_kwargs=action_kwargs) command_opt = cfg.SubCommandOpt('category', title='Command categories', help='Available categories', handler=add_command_parsers) def get_arg_string(args): arg = None if args[0] == '-': # NOTE(Dinesh_Bhor): args starts with FLAGS.oparser.prefix_chars # is optional args. Notice that cfg module takes care of # actual ArgParser so prefix_chars is always '-'. 
if args[1] == '-': # This is long optional arg arg = args[2:] else: arg = args[1:] else: arg = args return arg def fetch_func_args(func): fn_args = [] for args, kwargs in getattr(func, 'args', []): arg = get_arg_string(args[0]) fn_args.append(getattr(CONF.category, arg)) return fn_args def main(): """Parse options and call the appropriate class/method.""" CONF.register_cli_opt(command_opt) script_name = sys.argv[0] if len(sys.argv) < 2: print(_("\nOpenStack masakari version: %(version)s\n") % {'version': version.version_string()}) print(script_name + " category action []") print(_("Available categories:")) for category in CATEGORIES: print(_("\t%s") % category) sys.exit(2) try: CONF(sys.argv[1:], project='masakari', version=version.version_string()) logging.setup(CONF, "masakari") python_logging.captureWarnings(True) except cfg.ConfigDirNotFoundError as details: print(_("Invalid directory: %s") % details) sys.exit(2) except cfg.ConfigFilesNotFoundError as e: cfg_files = ', '.join(e.config_files) print(_("Failed to read configuration file(s): %s") % cfg_files) sys.exit(2) fn = CONF.category.action_fn fn_args = fetch_func_args(fn) fn(*fn_args) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/cmd/status.py0000664000175100017510000000237715033036143017667 0ustar00mylesmyles# Copyright (c) 2018 NEC, Corp. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import sys from oslo_upgradecheck import common_checks from oslo_upgradecheck import upgradecheck from masakari import conf from masakari.i18n import _ CONF = conf.CONF class Checks(upgradecheck.UpgradeCommands): """Contains upgrade checks Various upgrade checks should be added as separate methods in this class and added to _upgrade_checks tuple. """ _upgrade_checks = ( (_('Policy File JSON to YAML Migration'), (common_checks.check_policy_json, {'conf': CONF})), ) def main(): return upgradecheck.main( CONF, project='masakari', upgrade_command=Checks()) if __name__ == '__main__': sys.exit(main()) ././@PaxHeader0000000000000000000000000000003200000000000010210 xustar0026 mtime=1751923813.52671 masakari-19.1.0.dev18/masakari/common/0000775000175100017510000000000015033036146016511 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/common/__init__.py0000664000175100017510000000000015033036143020605 0ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/common/config.py0000664000175100017510000000446115033036143020332 0ustar00mylesmyles# Copyright 2016 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_config import cfg from oslo_middleware import cors from oslo_policy import opts def set_lib_defaults(): """Update default value for configuration options from other namespace. 
Example, oslo lib config options. This is needed for config generator tool to pick these default value changes. https://docs.openstack.org/oslo.config/latest/cli/ generator.html#modifying-defaults-from-other-namespaces """ set_middleware_defaults() # TODO(gmann): Remove setting the default value of config policy_file # once oslo_policy change the default value to 'policy.yaml'. # https://github.com/openstack/oslo.policy/blob/a626ad12fe5a3abd49d70e3e5b95589d279ab578/oslo_policy/opts.py#L49 opts.set_defaults(cfg.CONF, 'policy.yaml') def set_middleware_defaults(): """Update default configuration options for oslo.middleware.""" # CORS Defaults cfg.set_defaults(cors.CORS_OPTS, allow_headers=['X-Auth-Token', 'X-Openstack-Request-Id', 'X-Identity-Status', 'X-Roles', 'X-Service-Catalog', 'X-User-Id', 'X-Tenant-Id'], expose_headers=['X-Auth-Token', 'X-Openstack-Request-Id', 'X-Subject-Token', 'X-Service-Token'], allow_methods=['GET', 'PUT', 'POST', 'DELETE', 'PATCH'] ) ././@PaxHeader0000000000000000000000000000003200000000000010210 xustar0026 mtime=1751923813.52771 masakari-19.1.0.dev18/masakari/compute/0000775000175100017510000000000015033036146016675 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/compute/__init__.py0000664000175100017510000000136115033036143021004 0ustar00mylesmyles# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from oslo_utils import importutils def API(): cls = importutils.import_class("masakari.compute.nova.API") return cls() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/compute/nova.py0000664000175100017510000002306715033036143020217 0ustar00mylesmyles# Copyright 2016 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Handles all requests to Nova. """ import functools import sys from keystoneauth1 import exceptions as keystone_exception import keystoneauth1.loading import keystoneauth1.loading.session from novaclient import api_versions from novaclient import client as nova_client from novaclient import exceptions as nova_exception from oslo_log import log as logging from requests import exceptions as request_exceptions from masakari import conf from masakari import context as ctx from masakari import exception from masakari import utils CONF = conf.CONF CONF.import_group('keystone_authtoken', 'keystonemiddleware.auth_token') LOG = logging.getLogger(__name__) NOVA_API_VERSION = "2.53" nova_extensions = [ext for ext in nova_client.discover_extensions(NOVA_API_VERSION) if ext.name in ("list_extensions",)] def _reraise(desired_exc): utils.reraise(type(desired_exc), desired_exc, sys.exc_info()[2]) def translate_nova_exception(method): """Transforms a cinder exception but keeps its traceback intact.""" @functools.wraps(method) def wrapper(self, ctx, *args, **kwargs): try: res = method(self, ctx, *args, 
**kwargs) except (request_exceptions.Timeout, nova_exception.CommandError, keystone_exception.ConnectionError) as exc: err_msg = str(exc) _reraise(exception.MasakariException(reason=err_msg)) except (keystone_exception.BadRequest, nova_exception.BadRequest) as exc: err_msg = str(exc) _reraise(exception.InvalidInput(reason=err_msg)) except (keystone_exception.Forbidden, nova_exception.Forbidden) as exc: err_msg = str(exc) _reraise(exception.Forbidden(err_msg)) except (nova_exception.NotFound) as exc: err_msg = str(exc) _reraise(exception.NotFound(reason=err_msg)) except nova_exception.Conflict as exc: err_msg = str(exc) _reraise(exception.Conflict(reason=err_msg)) return res return wrapper def novaclient(context, timeout=None): """Returns a Nova client @param timeout: Number of seconds to wait for an answer before raising a Timeout exception (None to disable) """ nova_catalog_info = CONF.nova_catalog_admin_info service_type, service_name, endpoint_type = nova_catalog_info.split(':') context = ctx.RequestContext( CONF.os_privileged_user_name, None, auth_token=CONF.os_privileged_user_password, project_name=CONF.os_privileged_user_tenant, service_catalog=context.service_catalog, global_request_id=context.global_id) # User needs to authenticate to Keystone before querying Nova, so we set # auth_url to the identity service endpoint url = CONF.os_privileged_user_auth_url LOG.debug('Creating a Nova client using "%s" user', CONF.os_privileged_user_name) # Now that we have the correct auth_url, username, password and # project_name, let's build a Keystone session. 
loader = keystoneauth1.loading.get_plugin_loader( CONF.keystone_authtoken.auth_type) auth = loader.load_from_options( auth_url=url, username=context.user_id, password=context.auth_token, project_name=context.project_name, user_domain_name=CONF.os_user_domain_name, project_domain_name=CONF.os_project_domain_name, system_scope=CONF.os_system_scope) session_loader = keystoneauth1.loading.session.Session() keystone_session = session_loader.load_from_options( auth=auth, cacert=CONF.nova_ca_certificates_file, insecure=CONF.nova_api_insecure) client_obj = nova_client.Client( api_versions.APIVersion(NOVA_API_VERSION), session=keystone_session, insecure=CONF.nova_api_insecure, timeout=timeout, global_request_id=context.global_id, region_name=CONF.os_region_name, endpoint_type=endpoint_type, service_type=service_type, service_name=service_name, cacert=CONF.nova_ca_certificates_file, extensions=nova_extensions) return client_obj class API(object): """API for interacting with novaclient.""" @translate_nova_exception def get_servers(self, context, host): """Get a list of servers running on a specified host.""" opts = { 'host': host, 'all_tenants': True } nova = novaclient(context) LOG.info('Fetch Server list on %s', host) return nova.servers.list(detailed=True, search_opts=opts) @translate_nova_exception def enable_disable_service(self, context, host_name, enable=False, reason=None): """Enable or disable the service specified by nova service id.""" nova = novaclient(context) service = nova.services.list(host=host_name, binary='nova-compute')[0] if not enable: LOG.info('Disable nova-compute on %s', host_name) if reason: nova.services.disable_log_reason(service.id, reason) else: nova.services.disable(service.id) else: LOG.info('Enable nova-compute on %s', host_name) nova.services.enable(service.id) @translate_nova_exception def is_service_disabled(self, context, host_name, binary): """Check whether service is enabled or disabled on given host.""" nova = novaclient(context) 
service = nova.services.list(host=host_name, binary=binary)[0] return service.status == 'disabled' @translate_nova_exception def evacuate_instance(self, context, uuid, target=None): """Evacuate an instance from failed host to specified host.""" msg = ('Call evacuate command for instance %(uuid)s on host ' '%(target)s') LOG.info(msg, {'uuid': uuid, 'target': target}) nova = novaclient(context) nova.servers.evacuate(uuid, host=target) @translate_nova_exception def reset_instance_state(self, context, uuid, status='error'): """Reset the state of an instance to active or error.""" msg = ('Call reset state command on instance %(uuid)s to ' 'status: %(status)s.') LOG.info(msg, {'uuid': uuid, 'status': status}) nova = novaclient(context) nova.servers.reset_state(uuid, status) @translate_nova_exception def get_server(self, context, uuid): """Get a server.""" nova = novaclient(context) msg = ('Call get server command for instance %(uuid)s') LOG.info(msg, {'uuid': uuid}) return nova.servers.get(uuid) @translate_nova_exception def stop_server(self, context, uuid): """Stop a server.""" nova = novaclient(context) msg = ('Call stop server command for instance %(uuid)s') LOG.info(msg, {'uuid': uuid}) return nova.servers.stop(uuid) @translate_nova_exception def start_server(self, context, uuid): """Start a server.""" nova = novaclient(context) msg = ('Call start server command for instance %(uuid)s') LOG.info(msg, {'uuid': uuid}) return nova.servers.start(uuid) @translate_nova_exception def get_aggregate_list(self, context): """Get all aggregate list.""" nova = novaclient(context) LOG.info('Call aggregate-list command to get list of all aggregates.') return nova.aggregates.list() @translate_nova_exception def add_host_to_aggregate(self, context, host, aggregate): """Add host to given aggregate.""" nova = novaclient(context) msg = ("Call add_host command to add host '%(host_name)s' to " "aggregate '%(aggregate_name)s'.") LOG.info(msg, {'host_name': host, 'aggregate_name': 
aggregate.name}) return nova.aggregates.add_host(aggregate.id, host) @translate_nova_exception def lock_server(self, context, uuid): """Lock a server.""" nova = novaclient(context) msg = ('Call lock server command for instance %(uuid)s') LOG.info(msg, {'uuid': uuid}) return nova.servers.lock(uuid) @translate_nova_exception def unlock_server(self, context, uuid): """Unlock a server.""" nova = novaclient(context) msg = ('Call unlock server command for instance %(uuid)s') LOG.info(msg, {'uuid': uuid}) return nova.servers.unlock(uuid) @translate_nova_exception def find_compute_service(self, context, compute_name): """Find compute service with case sensitive hostname.""" nova = novaclient(context) msg = ("Call compute service find command to get list of matching " "hypervisor name '%(compute_name)s'") LOG.info(msg, {'compute_name': compute_name}) computes = \ nova.services.list(binary='nova-compute', host=compute_name) if len(computes) == 0: raise exception.ComputeNotFoundByName( compute_name=compute_name) ././@PaxHeader0000000000000000000000000000003200000000000010210 xustar0026 mtime=1751923813.52771 masakari-19.1.0.dev18/masakari/conf/0000775000175100017510000000000015033036146016146 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/conf/__init__.py0000664000175100017510000000267515033036143020266 0ustar00mylesmyles# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations # under the License. from oslo_config import cfg from masakari.conf import api from masakari.conf import base from masakari.conf import coordination from masakari.conf import database from masakari.conf import engine from masakari.conf import engine_driver from masakari.conf import exceptions from masakari.conf import nova from masakari.conf import osapi_v1 from masakari.conf import paths from masakari.conf import service from masakari.conf import ssl from masakari.conf import wsgi CONF = cfg.CONF api.register_opts(CONF) base.register_opts(CONF) coordination.register_opts(CONF) database.register_opts(CONF) engine.register_opts(CONF) engine_driver.register_opts(CONF) exceptions.register_opts(CONF) nova.register_opts(CONF) osapi_v1.register_opts(CONF) paths.register_opts(CONF) ssl.register_opts(CONF) service.register_opts(CONF) wsgi.register_opts(CONF) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/conf/api.py0000664000175100017510000000501515033036143017267 0ustar00mylesmyles# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_config import cfg auth_opts = [ cfg.StrOpt("auth_strategy", default="keystone", choices=("keystone", "noauth2"), help=""" This determines the strategy to use for authentication: keystone or noauth2. 
'noauth2' is designed for testing only, as it does no actual credential checking. 'noauth2' provides administrative credentials only if 'admin' is specified as the username. * Possible values: Either 'keystone' (default) or 'noauth2'. * Services that use this: ``masakari-api`` * Related options: None """), cfg.BoolOpt("use_forwarded_for", default=False, help=""" When True, the 'X-Forwarded-For' header is treated as the canonical remote address. When False (the default), the 'remote_address' header is used. You should only enable this if you have an HTML sanitizing proxy. * Possible values: True, False (default) * Services that use this: ``masakari-api`` * Related options: None """), ] osapi_opts = [ cfg.IntOpt("osapi_max_limit", default=1000, help=""" As a query can potentially return many thousands of items, you can limit the maximum number of items in a single response by setting this option. * Possible values: Any positive integer. Default is 1000. * Services that use this: ``masakari-api`` * Related options: None """), cfg.StrOpt("osapi_masakari_link_prefix", help=""" This string is prepended to the normal URL that is returned in links to the OpenStack Masakari API. If it is empty (the default), the URLs are returned unchanged. * Possible values: Any string, including an empty string (the default). * Services that use this: ``masakari-api`` * Related options: None """), ] ALL_OPTS = (auth_opts + osapi_opts) def register_opts(conf): conf.register_opts(ALL_OPTS) def list_opts(): return {"DEFAULT": ALL_OPTS} ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/conf/base.py0000664000175100017510000000272615033036143017436 0ustar00mylesmyles# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_config import cfg base_options = [ cfg.StrOpt( 'tempdir', help='Explicitly specify the temporary working directory.'), cfg.BoolOpt( 'monkey_patch', default=False, help=""" Determine if monkey patching should be applied. Related options: * ``monkey_patch_modules``: This must have values set for this option to have any effect """), cfg.ListOpt( 'monkey_patch_modules', default=['masakari.api:masakari.cmd'], help=""" List of modules/decorators to monkey patch. This option allows you to patch a decorator for all functions in specified modules. Related options: * ``monkey_patch``: This must be set to ``True`` for this option to have any effect """), ] def register_opts(conf): conf.register_opts(base_options) def list_opts(): return {'DEFAULT': base_options} ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/conf/coordination.py0000664000175100017510000000274115033036143021211 0ustar00mylesmyles# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from oslo_config import cfg coordination_opts = [ cfg.StrOpt('backend_url', default=None, help="The backend URL to use for distributed coordination." "By default it's None which means that coordination is " "disabled. The coordination is implemented for " "distributed lock management and was tested with etcd." "Coordination doesn't work for file driver because lock " "files aren't removed after lock releasing."), ] def register_opts(conf): """Registers coordination configuration options :param conf: configuration """ conf.register_opts(coordination_opts, group="coordination") def list_opts(): """Lists coordination configuration options :return: coordination configuration options """ return {"coordination": coordination_opts} ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/conf/database.py0000664000175100017510000000165315033036143020266 0ustar00mylesmyles# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from masakari.conf import paths from oslo_db import options as oslo_db_options _DEFAULT_SQL_CONNECTION = 'sqlite:///' + paths.state_path_def( 'masakari.sqlite') def register_opts(conf): oslo_db_options.set_defaults(conf, connection=_DEFAULT_SQL_CONNECTION) def list_opts(): return {'DEFAULT': []} ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/conf/engine.py0000664000175100017510000001101115033036143017754 0ustar00mylesmyles# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_config import cfg rpcapi_opts = [ cfg.StrOpt("masakari_topic", default="ha_engine", deprecated_for_removal=True, deprecated_since='3.0.0', deprecated_reason=""" Configurable RPC topic provides little value and it can easily break Masakari if operator configures it to the same topic used by other OpenStack services.""", help=""" This is the message queue topic that the masakari engine 'listens' on. It is used when the masakari engine is started up to configure the queue, and whenever an RPC call to the masakari engine is made. * Possible values: Any string, but there is almost never any reason to ever change this value from its default of 'engine'. * Services that use this: ``masakari-engine`` * Related options: None """), ] driver_opts = [ cfg.StrOpt( 'notification_driver', default='taskflow_driver', help=""" Defines which driver to use for executing notification workflows. 
"""), ] notification_opts = [ cfg.IntOpt('duplicate_notification_detection_interval', default=180, min=0, help="Interval in seconds for identifying duplicate " "notifications. If the notification received is identical " "to the previous ones whose status is either new or " "running and if it's created_timestamp and the current " "timestamp is less than this config option value, then " "the notification will be considered as duplicate and " "it will be ignored." ), cfg.IntOpt('wait_period_after_service_update', default=180, help='Number of seconds to wait after a service is enabled ' 'or disabled.'), cfg.IntOpt('wait_period_after_evacuation', default=90, help='Wait until instance is evacuated'), cfg.IntOpt('verify_interval', default=1, help='The monitoring interval for looping'), cfg.IntOpt('wait_period_after_power_off', default=180, help='Number of seconds to wait for instance to shut down'), cfg.IntOpt('wait_period_after_power_on', default=60, help='Number of seconds to wait for instance to start'), cfg.IntOpt('process_unfinished_notifications_interval', default=120, help='Interval in seconds for processing notifications which ' 'are in error or new state.'), cfg.IntOpt('retry_notification_new_status_interval', default=60, mutable=True, help="Interval in seconds for identifying notifications which " "are in new state. 
If the notification is in new state " "till this config option value after it's " "generated_time, then it is considered that notification " "is ignored by the messaging queue and will be processed " "by 'process_unfinished_notifications' periodic task."), cfg.IntOpt('check_expired_notifications_interval', default=600, help='Interval in seconds for checking running notifications.'), cfg.IntOpt('notifications_expired_interval', default=86400, help='Interval in seconds for identifying running ' 'notifications expired.'), cfg.IntOpt('host_failure_recovery_threads', default=3, min=1, help="Number of threads to be used for evacuating and " "confirming instances during execution of host_failure " "workflow."), ] ALL_OPTS = (rpcapi_opts + notification_opts + driver_opts) def register_opts(conf): conf.register_opts(ALL_OPTS) def list_opts(): return {'DEFAULT': ALL_OPTS} ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/conf/engine_driver.py0000664000175100017510000002542215033036143021342 0ustar00mylesmyles# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from oslo_config import cfg from oslo_config import types instance_recovery_group = cfg.OptGroup( 'instance_failure', title='Instance failure recovery options', help="Configuration options for instance failure recovery") host_recovery_group = cfg.OptGroup( 'host_failure', title='Host failure recovery options', help="Configuration options for host failure recovery") process_recovery_group = cfg.OptGroup( 'process_failure', title='Process failure recovery options', help="Configuration options for process failure recovery") customized_recovery_flow_group = cfg.OptGroup( 'taskflow_driver_recovery_flows', title='Customized recovery flow Options', help="Configuration options for customizing various failure recovery" "workflow tasks.") taskflow_group = cfg.OptGroup( 'taskflow', title='Taskflow driver options', help="Configuration options for taskflow driver") host_failure_opts = [ cfg.BoolOpt('evacuate_all_instances', default=True, help=""" Operators can decide whether all instances or only those instances which have ``[host_failure]\\ha_enabled_instance_metadata_key`` set to ``True`` should be allowed for evacuation from a failed source compute node. When set to True, it will evacuate all instances from a failed source compute node. First preference will be given to those instances which have ``[host_failure]\\ha_enabled_instance_metadata_key`` set to ``True``, and then it will evacuate the remaining ones. When set to False, it will evacuate only those instances which have ``[host_failure]\\ha_enabled_instance_metadata_key`` set to ``True``. """), cfg.StrOpt('ha_enabled_instance_metadata_key', default='HA_Enabled', help=""" Operators can decide on the instance metadata key naming that affects the per-instance behaviour of ``[host_failure]\\evacuate_all_instances``. The default is the same for both failure types (host, instance) but the value can be overridden to make the metadata key different per failure type. 
"""), cfg.BoolOpt('ignore_instances_in_error_state', default=False, help=""" Operators can decide whether error instances should be allowed for evacuation from a failed source compute node or not. When set to True, it will ignore error instances from evacuation from a failed source compute node. When set to False, it will evacuate error instances along with other instances from a failed source compute node."""), cfg.BoolOpt("add_reserved_host_to_aggregate", default=False, help=""" Operators can decide whether reserved_host should be added to aggregate group of failed compute host. When set to True, reserved host will be added to the aggregate group of failed compute host. When set to False, the reserved_host will not be added to the aggregate group of failed compute host."""), cfg.StrOpt("service_disable_reason", default="Masakari detected host failed.", help="Compute disable reason in case Masakari detects host " "failure."), ] instance_failure_options = [ cfg.BoolOpt('process_all_instances', default=False, help=""" Operators can decide whether all instances or only those instances which have ``[instance_failure]\\ha_enabled_instance_metadata_key`` set to ``True`` should be taken into account to recover from instance failure events. When set to True, it will execute instance failure recovery actions for an instance irrespective of whether that particular instance has ``[instance_failure]\\ha_enabled_instance_metadata_key`` set to ``True``. When set to False, it will only execute instance failure recovery actions for an instance which has ``[instance_failure]\\ha_enabled_instance_metadata_key`` set to ``True``. """), cfg.StrOpt('ha_enabled_instance_metadata_key', default='HA_Enabled', help=""" Operators can decide on the instance metadata key naming that affects the per-instance behaviour of ``[instance_failure]\\process_all_instances``. 
The default is the same for both failure types (host, instance) but the value can be overridden to make the metadata key different per failure type. """), ] taskflow_options = [ cfg.StrOpt('connection', help=""" The SQLAlchemy connection string to use to connect to the taskflow database. """), ] taskflow_driver_recovery_flows = [ cfg.Opt('host_auto_failure_recovery_tasks', type=types.Dict( bounds=False, value_type=types.List(bounds=True, item_type=types.String(quotes=True))), default={'pre': ['disable_compute_service_task'], 'main': ['prepare_HA_enabled_instances_task'], 'post': ['evacuate_instances_task']}, help=(""" This option allows operator to customize tasks to be executed for host failure auto recovery workflow. Provide list of strings reflecting to the task classes that should be included to the host failure recovery workflow. The full classname path of all task classes should be defined in the 'masakari.task_flow.tasks' of setup.cfg and these classes may be implemented by OpenStack Masaskari project team, deployer or third party. By default below three tasks will be part of this config option:- 1. disable_compute_service_task 2. prepare_HA_enabled_instances_task 3. evacuate_instances_task The allowed values for this option is comma separated dictionary of object names in between ``{`` and ``}``.""")), cfg.Opt('host_rh_failure_recovery_tasks', type=types.Dict( bounds=False, value_type=types.List(bounds=True, item_type=types.String(quotes=True))), default={'pre': ['disable_compute_service_task'], 'main': ['prepare_HA_enabled_instances_task', 'evacuate_instances_task'], 'post': []}, help=(""" This option allows operator to customize tasks to be executed for host failure reserved_host recovery workflow. Provide list of strings reflecting to the task classes that should be included to the host failure recovery workflow. 
The full classname path of all task classes should be defined in the 'masakari.task_flow.tasks' of setup.cfg and these classes may be implemented by OpenStack Masaskari project team, deployer or third party. By default below three tasks will be part of this config option:- 1. disable_compute_service_task 2. prepare_HA_enabled_instances_task 3. evacuate_instances_task The allowed values for this option is comma separated dictionary of object names in between ``{`` and ``}``.""")), cfg.Opt('instance_failure_recovery_tasks', type=types.Dict( bounds=False, value_type=types.List(bounds=True, item_type=types.String(quotes=True))), default={'pre': ['stop_instance_task'], 'main': ['start_instance_task'], 'post': ['confirm_instance_active_task']}, help=(""" This option allows operator to customize tasks to be executed for instance failure recovery workflow. Provide list of strings reflecting to the task classes that should be included to the instance failure recovery workflow. The full classname path of all task classes should be defined in the 'masakari.task_flow.tasks' of setup.cfg and these classes may be implemented by OpenStack Masaskari project team, deployer or third party. By default below three tasks will be part of this config option:- 1. stop_instance_task 2. start_instance_task 3. confirm_instance_active_task The allowed values for this option is comma separated dictionary of object names in between ``{`` and ``}``.""")), cfg.Opt('process_failure_recovery_tasks', type=types.Dict( bounds=False, value_type=types.List(bounds=True, item_type=types.String(quotes=True))), default={'pre': ['disable_compute_node_task'], 'main': ['confirm_compute_node_disabled_task'], 'post': []}, help=(""" This option allows operator to customize tasks to be executed for process failure recovery workflow. Provide list of strings reflecting to the task classes that should be included to the process failure recovery workflow. 
The full classname path of all task classes should be defined in the 'masakari.task_flow.tasks' of setup.cfg and these classes may be implemented by OpenStack Masaskari project team, deployer or third party. By default below two tasks will be part of this config option:- 1. disable_compute_node_task 2. confirm_compute_node_disabled_task The allowed values for this option is comma separated dictionary of object names in between ``{`` and ``}``.""")) ] process_failure_opts = [ cfg.StrOpt("service_disable_reason", default="Masakari detected process failed.", help="Compute disable reason in case Masakari detects process " "failure."), ] def register_opts(conf): conf.register_group(instance_recovery_group) conf.register_group(host_recovery_group) conf.register_group(process_recovery_group) conf.register_group(customized_recovery_flow_group) conf.register_group(taskflow_group) conf.register_opts(instance_failure_options, group=instance_recovery_group) conf.register_opts(host_failure_opts, group=host_recovery_group) conf.register_opts(process_failure_opts, group=process_recovery_group) conf.register_opts(taskflow_driver_recovery_flows, group=customized_recovery_flow_group) conf.register_opts(taskflow_options, group=taskflow_group) def list_opts(): return { instance_recovery_group.name: instance_failure_options, host_recovery_group.name: host_failure_opts, process_recovery_group.name: process_failure_opts, taskflow_group.name: taskflow_options } def customized_recovery_flow_list_opts(): return { customized_recovery_flow_group.name: taskflow_driver_recovery_flows } ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/conf/exceptions.py0000664000175100017510000000165415033036143020704 0ustar00mylesmyles# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_config import cfg exc_log_opts = [ cfg.BoolOpt('fatal_exception_format_errors', default=False, help='Make exception message format errors fatal'), ] def register_opts(conf): conf.register_opts(exc_log_opts) def list_opts(): return {'DEFAULT': exc_log_opts} ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/conf/nova.py0000664000175100017510000000540715033036143017466 0ustar00mylesmyles# Copyright (c) 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from keystoneauth1 import loading as ks_loading from oslo_config import cfg nova_opts = [ cfg.StrOpt('nova_catalog_admin_info', default='compute:nova:publicURL', help='Match this value when searching for nova in the ' 'service catalog. 
Format is: separated values of ' 'the form: ' '::'), cfg.StrOpt('os_region_name', help='Region name of this node'), cfg.StrOpt('nova_ca_certificates_file', help='Location of ca certificates file to use for nova client ' 'requests.'), cfg.BoolOpt('nova_api_insecure', default=False, help='Allow to perform insecure SSL requests to nova'), cfg.StrOpt('os_privileged_user_name', help='OpenStack privileged account username. Used for requests ' 'to other services (such as Nova) that require an account ' 'with special rights.'), cfg.StrOpt('os_privileged_user_password', help='Password associated with the OpenStack privileged ' 'account.', secret=True), cfg.StrOpt('os_privileged_user_tenant', help='Tenant name associated with the OpenStack privileged ' 'account.'), cfg.URIOpt('os_privileged_user_auth_url', help='Auth URL associated with the OpenStack privileged ' 'account.'), cfg.StrOpt('os_user_domain_name', default="default", help='User domain name associated with the OpenStack ' 'privileged account.'), cfg.StrOpt('os_project_domain_name', default="default", help='Project domain name associated with the OpenStack ' 'privileged account.'), cfg.StrOpt('os_system_scope', help='Scope for system operations.'), ] def register_opts(conf): conf.register_opts(nova_opts) ks_loading.register_session_conf_options(conf, 'DEFAULT') def list_opts(): return { 'DEFAULT': nova_opts } ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/conf/opts.py0000664000175100017510000000642015033036143017504 0ustar00mylesmyles# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ This is the single point of entry to generate the sample configuration file for Masakari. It collects all the necessary info from the other modules in this package. It is assumed that: * every other module in this package has a 'list_opts' function which return a dict where * the keys are strings which are the group names * the value of each key is a list of config options for that group * the masakari.conf package doesn't have further packages with config options * this module is only used in the context of sample file generation """ import collections import importlib import os import pkgutil from masakari.conf import engine_driver LIST_OPTS_FUNC_NAME = "list_opts" _recovery_workflow_opts = [ ('taskflow_driver_recovery_flows', engine_driver.taskflow_driver_recovery_flows) ] def _tupleize(dct): """Take the dict of options and convert to the 2-tuple format.""" return [(key, val) for key, val in dct.items()] def list_opts(): opts = collections.defaultdict(list) module_names = _list_module_names() imported_modules = _import_modules(module_names) _append_config_options(imported_modules, opts) return _tupleize(opts) def list_recovery_workflow_opts(): """Return a list of oslo_config options available for recovery workflow""" return [(key, val) for key, val in _recovery_workflow_opts] def _list_module_names(): module_names = [] package_path = os.path.dirname(os.path.abspath(__file__)) for _, modname, ispkg in pkgutil.iter_modules(path=[package_path]): if modname == "opts" or ispkg: continue else: module_names.append(modname) return module_names def 
_import_modules(module_names): imported_modules = [] for modname in module_names: mod = importlib.import_module("masakari.conf." + modname) if not hasattr(mod, LIST_OPTS_FUNC_NAME): msg = "The module 'masakari.conf.%s' should have a '%s' "\ "function which returns the config options." % \ (modname, LIST_OPTS_FUNC_NAME) raise Exception(msg) else: imported_modules.append(mod) return imported_modules def _process_old_opts(configs): """Convert old-style 2-tuple configs to dicts.""" if isinstance(configs, tuple): configs = [configs] return {label: options for label, options in configs} def _append_config_options(imported_modules, config_options): for mod in imported_modules: configs = mod.list_opts() if not isinstance(configs, dict): configs = _process_old_opts(configs) for key, val in configs.items(): config_options[key].extend(val) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/conf/osapi_v1.py0000664000175100017510000000574215033036143020246 0ustar00mylesmyles# Copyright (c) 2016 NTT Data # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_config import cfg api_opts = [ cfg.ListOpt("extensions_blacklist", default=[], deprecated_for_removal=True, deprecated_group="osapi_v1", help=""" *DEPRECATED* This option is a list of all of the v2.1 API extensions to never load. 
However, it will be removed in the near future, after which the all the functionality that was previously in extensions will be part of the standard API, and thus always accessible. * Possible values: A list of strings, each being the alias of an extension that you do not wish to load. * Services that use this: ``masakari-api`` * Related options: enabled, extensions_whitelist """), cfg.ListOpt("extensions_whitelist", default=[], deprecated_for_removal=True, deprecated_group="osapi_v1", help=""" *DEPRECATED* This is a list of extensions. If it is empty, then *all* extensions except those specified in the extensions_blacklist option will be loaded. If it is not empty, then only those extensions in this list will be loaded, provided that they are also not in the extensions_blacklist option. Once this deprecated option is removed, after which the all the functionality that was previously in extensions will be part of the standard API, and thus always accessible. * Possible values: A list of strings, each being the alias of an extension that you wish to load, or an empty list, which indicates that all extensions are to be run. * Services that use this: ``masakari-api`` * Related options: enabled, extensions_blacklist """), cfg.StrOpt("project_id_regex", default=None, deprecated_for_removal=True, deprecated_group="osapi_v1", help=""" *DEPRECATED* This option is a string representing a regular expression (regex) that matches the project_id as contained in URLs. If not set, it will match normal UUIDs created by keystone. 
* Possible values: A string representing any legal regular expression * Services that use this: ``masakari-api`` * Related options: None """), ] api_opts_group = cfg.OptGroup(name="osapi_v1", title="API v1 Options") def register_opts(conf): conf.register_group(api_opts_group) conf.register_opts(api_opts, api_opts_group) def list_opts(): return {api_opts_group: api_opts} ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/conf/paths.py0000664000175100017510000000331515033036143017636 0ustar00mylesmyles# Copyright 2016 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import os import sys from oslo_config import cfg path_opts = [ cfg.StrOpt('pybasedir', default=os.path.abspath(os.path.join(os.path.dirname(__file__), '../../')), help='Directory where the masakari python module is installed'), cfg.StrOpt('bindir', default=os.path.join(sys.prefix, 'local', 'bin'), help='Directory where masakari binaries are installed'), cfg.StrOpt('state_path', default='$pybasedir', help="Top-level directory for maintaining masakari's state"), ] def basedir_def(*args): """Return an uninterpolated path relative to $pybasedir.""" return os.path.join('$pybasedir', *args) def bindir_def(*args): """Return an uninterpolated path relative to $bindir.""" return os.path.join('$bindir', *args) def state_path_def(*args): """Return an uninterpolated path relative to $state_path.""" return os.path.join('$state_path', *args) def register_opts(conf): conf.register_opts(path_opts) def list_opts(): return {"DEFAULT": path_opts} ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/conf/service.py0000664000175100017510000000524515033036143020163 0ustar00mylesmyles# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import socket from oslo_config import cfg service_opts = [ cfg.HostAddressOpt('host', default=socket.gethostname(), help=''' Hostname, FQDN or IP address of this host. Must be valid within AMQP key. 
Possible values: * String with hostname, FQDN or IP address. Default is hostname of this host. '''), cfg.StrOpt('engine_manager', default='masakari.engine.manager.MasakariManager', help='Full class name for the Manager for masakari engine'), cfg.IntOpt('report_interval', default=10, help='Seconds between nodes reporting state to datastore'), cfg.BoolOpt('periodic_enable', default=True, help='Enable periodic tasks'), cfg.IntOpt('periodic_interval_max', default=300, help='Max interval time between periodic tasks execution in ' 'seconds.'), cfg.IntOpt('periodic_fuzzy_delay', default=60, help='Range of seconds to randomly delay when starting the' ' periodic task scheduler to reduce stampeding.' ' (Disable by setting to 0)'), cfg.BoolOpt('use_ssl', default=False, help='Use APIs with SSL enabled'), cfg.HostAddressOpt('masakari_api_listen', default="0.0.0.0", help='The IP address on which the Masakari API will listen.'), cfg.IntOpt('masakari_api_listen_port', default=15868, min=1, max=65535, help='The port on which the Masakari API will listen.'), cfg.IntOpt('masakari_api_workers', help='Number of workers for Masakari API service. The default ' 'will be the number of CPUs available.'), cfg.IntOpt('service_down_time', default=60, help='Maximum time since last check-in for up service'), ] def register_opts(conf): conf.register_opts(service_opts) def list_opts(): return {'DEFAULT': service_opts} ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/conf/ssl.py0000664000175100017510000000140115033036143017312 0ustar00mylesmyles# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_service import sslutils def register_opts(conf): sslutils.register_opts(conf) def list_opts(): return sslutils.list_opts() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/conf/wsgi.py0000664000175100017510000001000715033036143017464 0ustar00mylesmyles# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_config import cfg wsgi_group = cfg.OptGroup( 'wsgi', title='WSGI Options') api_paste_config = cfg.StrOpt( 'api_paste_config', default="api-paste.ini", help='File name for the paste.deploy config for masakari-api', deprecated_group='DEFAULT') # TODO(abhishekk): It is not possible to rename this to 'log_format' # yet, as doing so would cause a conflict if '[DEFAULT] log_format' # were used. When 'deprecated_group' is removed after Ocata, this # should be changed. 
wsgi_log_format = cfg.StrOpt( 'wsgi_log_format', default='%(client_ip)s "%(request_line)s" status: %(status_code)s' ' len: %(body_length)s time: %(wall_seconds).7f', help='A python format string that is used as the template to ' 'generate log lines. The following values can be formatted ' 'into it: client_ip, date_time, request_line, status_code, ' 'body_length, wall_seconds.', deprecated_group='DEFAULT') secure_proxy_ssl_header = cfg.StrOpt( 'secure_proxy_ssl_header', help='The HTTP header used to determine the scheme for the ' 'original request, even if it was removed by an SSL ' 'terminating proxy. Typical value is ' '"HTTP_X_FORWARDED_PROTO".', deprecated_group='DEFAULT') ssl_ca_file = cfg.StrOpt( 'ssl_ca_file', help='CA certificate file to use to verify connecting clients', deprecated_group='DEFAULT') ssl_cert_file = cfg.StrOpt( 'ssl_cert_file', help='SSL certificate of API server', deprecated_group='DEFAULT') ssl_key_file = cfg.StrOpt( 'ssl_key_file', help='SSL private key of API server', deprecated_group='DEFAULT') tcp_keepidle = cfg.IntOpt( 'tcp_keepidle', default=600, help='Sets the value of TCP_KEEPIDLE in seconds for each ' 'server socket. Not supported on OS X.', deprecated_group='DEFAULT') default_pool_size = cfg.IntOpt( 'default_pool_size', default=1000, help='Size of the pool of greenthreads used by wsgi', deprecated_group='DEFAULT', deprecated_name='wsgi_default_pool_size') max_header_line = cfg.IntOpt( 'max_header_line', default=16384, help='Maximum line size of message headers to be accepted. 
' 'max_header_line may need to be increased when using ' 'large tokens (typically those generated by the ' 'Keystone v3 API with big service catalogs).', deprecated_group='DEFAULT') keep_alive = cfg.BoolOpt( 'keep_alive', default=True, help='If False, closes the client socket connection explicitly.', deprecated_group='DEFAULT', deprecated_name='wsgi_keep_alive') client_socket_timeout = cfg.IntOpt( 'client_socket_timeout', default=900, help="Timeout for client connections' socket operations. " "If an incoming connection is idle for this number of " "seconds it will be closed. A value of '0' means " "wait forever.", deprecated_group='DEFAULT') ALL_OPTS = [api_paste_config, wsgi_log_format, secure_proxy_ssl_header, ssl_ca_file, ssl_cert_file, ssl_key_file, tcp_keepidle, default_pool_size, max_header_line, keep_alive, client_socket_timeout ] def register_opts(conf): conf.register_group(wsgi_group) conf.register_opts(ALL_OPTS, group=wsgi_group) def list_opts(): return {wsgi_group: ALL_OPTS} ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/config.py0000664000175100017510000000271115033036143017036 0ustar00mylesmyles# Copyright 2016 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from oslo_log import log from masakari.common import config import masakari.conf from masakari.db.sqlalchemy import api as sqlalchemy_api from masakari import rpc from masakari import version CONF = masakari.conf.CONF def parse_args(argv, default_config_files=None, configure_db=True, init_rpc=True): log.register_options(CONF) # We use the oslo.log default log levels which includes suds=INFO # and add only the extra levels that Masakari needs log.set_defaults(default_log_levels=log.get_default_log_levels()) rpc.set_defaults(control_exchange='masakari') config.set_middleware_defaults() CONF(argv[1:], project='masakari', version=version.version_string(), default_config_files=default_config_files) if init_rpc: rpc.init(CONF) if configure_db: sqlalchemy_api.configure(CONF) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/context.py0000664000175100017510000002234515033036143017262 0ustar00mylesmyles# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ RequestContext: context for requests that persist through all of masakari. 
""" import copy from keystoneauth1.access import service_catalog as ksa_service_catalog from keystoneauth1 import plugin from oslo_context import context from oslo_db.sqlalchemy import enginefacade from oslo_log import log as logging from oslo_utils import timeutils from masakari import exception from masakari.i18n import _ from masakari import policy from masakari import utils LOG = logging.getLogger(__name__) class _ContextAuthPlugin(plugin.BaseAuthPlugin): """A keystoneauth auth plugin that uses the values from the Context. Ideally we would use the plugin provided by auth_token middleware however this plugin isn't serialized yet so we construct one from the serialized auth data. """ def __init__(self, auth_token, sc): super(_ContextAuthPlugin, self).__init__() self.auth_token = auth_token self.service_catalog = ksa_service_catalog.ServiceCatalogV2(sc) def get_token(self, *args, **kwargs): return self.auth_token def get_endpoint(self, session, service_type=None, interface=None, region_name=None, service_name=None, **kwargs): return self.service_catalog.url_for(service_type=service_type, service_name=service_name, interface=interface, region_name=region_name) @enginefacade.transaction_context_provider class RequestContext(context.RequestContext): """Security context and request information. Represents the user taking a given action within the system. """ FROM_DICT_EXTRA_KEYS = [ 'read_deleted', 'remote_address', 'timestamp', 'service_catalog', ] def __init__(self, user_id=None, project_id=None, is_admin=None, read_deleted="no", roles=None, remote_address=None, timestamp=None, request_id=None, auth_token=None, overwrite=True, user_name=None, project_name=None, service_catalog=None, user_auth_plugin=None, **kwargs): """:param read_deleted: 'no' indicates deleted records are hidden, 'yes' indicates deleted records are visible, 'only' indicates that *only* deleted records are visible. 
:param overwrite: Set to False to ensure that the greenthread local copy of the index is not overwritten. :param user_auth_plugin: The auth plugin for the current request's authentication data. :param kwargs: Extra arguments that might be present, but we ignore because they possibly came in from older rpc messages. """ user = kwargs.pop('user', None) tenant = kwargs.pop('tenant', None) super(RequestContext, self).__init__( auth_token=auth_token, user_id=user_id or user, project_id=project_id or tenant, domain_id=kwargs.pop('domain', None), user_domain_id=kwargs.pop('user_domain', None), project_domain_id=kwargs.pop('project_domain', None), is_admin=is_admin, read_only=kwargs.pop('read_only', False), show_deleted=kwargs.pop('show_deleted', False), request_id=request_id, resource_uuid=kwargs.pop('resource_uuid', None), overwrite=overwrite, roles=roles, user_name=user_name, project_name=project_name, is_admin_project=kwargs.pop('is_admin_project', True), global_request_id=kwargs.pop('global_request_id', None)) # oslo_context's RequestContext.to_dict() generates this field, we can # safely ignore this as we don't use it. kwargs.pop('user_identity', None) if kwargs: LOG.debug('Arguments dropped when creating context: %s', str(kwargs)) if read_deleted is None: # If we did not get a value for read_deleted, ensure we default # it to "no" as code expects it to be a string. 
self.read_deleted = 'no' else: self.read_deleted = read_deleted self.remote_address = remote_address if not timestamp: timestamp = timeutils.utcnow() if isinstance(timestamp, str): timestamp = timeutils.parse_strtime(timestamp) self.timestamp = timestamp if service_catalog: # Only include required parts of service_catalog self.service_catalog = [ s for s in service_catalog if s.get('type') in ( 'compute', 'identity')] else: # if list is empty or none self.service_catalog = [] self.user_auth_plugin = user_auth_plugin if self.is_admin is None: self.is_admin = policy.check_is_admin(self) def get_auth_plugin(self): if self.user_auth_plugin: return self.user_auth_plugin else: return _ContextAuthPlugin(self.auth_token, self.service_catalog) def _get_read_deleted(self): return self._read_deleted def _set_read_deleted(self, read_deleted): if read_deleted not in ('no', 'yes', 'only'): raise ValueError(_("read_deleted can only be one of 'no', " "'yes' or 'only', not %r") % read_deleted) self._read_deleted = read_deleted def _del_read_deleted(self): del self._read_deleted read_deleted = property(_get_read_deleted, _set_read_deleted, _del_read_deleted) def to_dict(self): values = super(RequestContext, self).to_dict() # FIXME: defensive hasattr() checks need to be # removed once we figure out why we are seeing stack # traces values.update({ 'user_id': getattr(self, 'user_id', None), 'project_id': getattr(self, 'project_id', None), 'read_deleted': getattr(self, 'read_deleted', 'no'), 'remote_address': getattr(self, 'remote_address', None), 'timestamp': utils.strtime(self.timestamp) if hasattr( self, 'timestamp') else None, 'user_name': getattr(self, 'user_name', None), 'service_catalog': getattr(self, 'service_catalog', None), 'project_name': getattr(self, 'project_name', None) }) return values def elevated(self, read_deleted=None): """Return a version of this context with admin flag set.""" context = copy.copy(self) # context.roles must be deepcopied to leave original roles # 
without changes context.roles = copy.deepcopy(self.roles) context.is_admin = True if 'admin' not in context.roles: context.roles.append('admin') if read_deleted is not None: context.read_deleted = read_deleted return context def can(self, action, target=None, fatal=True): """Verifies that the given action is valid on the target in this context. :param action: string representing the action to be checked. :param target: dictionary representing the object of the action for object creation this should be a dictionary representing the location of the object e.g. ``{'project_id': context.project_id}``. If None, then this default target will be considered: {'project_id': self.project_id, 'user_id': self.user_id} :param fatal: if False, will return False when an exception.Forbidden occurs. :raises masakari.exception.Forbidden: if verification fails and fatal is True. :return: returns a non-False value (not necessarily "True") if authorized and False if not authorized and fatal is False. """ if target is None: target = {'project_id': self.project_id, 'user_id': self.user_id} try: return policy.authorize(self, action, target) except exception.Forbidden: if fatal: raise return False def to_policy_values(self): policy = super(RequestContext, self).to_policy_values() policy['is_admin'] = self.is_admin return policy def __str__(self): return "" % self.to_dict() def get_admin_context(read_deleted="no"): return RequestContext(user_id=None, project_id=None, is_admin=True, read_deleted=read_deleted, overwrite=False) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/coordination.py0000664000175100017510000001205415033036143020262 0ustar00mylesmyles# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Coordination and locking utilities.""" import inspect import uuid import decorator from oslo_config import cfg from oslo_log import log from oslo_utils import timeutils from tooz import coordination LOG = log.getLogger(__name__) class Coordinator(object): """Tooz coordination wrapper. Coordination member id is created from concatenated `prefix` and `agent_id` parameters. :param str agent_id: Agent identifier :param str prefix: Used to provide member identifier with a meaningful prefix. """ def __init__(self, agent_id=None, prefix=''): self.coordinator = None self.agent_id = agent_id or str(uuid.uuid4()) self.started = False self.prefix = prefix def start(self): """Starts coordination :return: None """ if self.started: return # Tooz expects member_id as a byte string. member_id = (self.prefix + self.agent_id).encode('ascii') self.coordinator = coordination.get_coordinator( cfg.CONF.coordination.backend_url, member_id) self.coordinator.start(start_heart=True) self.started = True def stop(self): """Disconnect from coordination backend and stop heartbeat.""" if self.started: self.coordinator.stop() self.coordinator = None self.started = False def get_lock(self, name): """Return a Tooz backend lock. :param str name: The lock name that is used to identify it across all nodes. """ # Tooz expects lock name as a byte string. 
lock_name = (self.prefix + name).encode('ascii') if cfg.CONF.coordination.backend_url: return self.coordinator.get_lock(lock_name) LOG.debug("Cannot get lock %s, because coordination is not configured", lock_name) COORDINATOR = Coordinator(prefix='masakari-') def synchronized(lock_name, blocking=True, coordinator=COORDINATOR): """Synchronization decorator. :param str lock_name: Lock name. :param blocking: If True, blocks until the lock is acquired. If False, raises exception when not acquired. Otherwise, the value is used as a timeout value and if lock is not acquired after this number of seconds exception is raised. :param coordinator: Coordinator class to use when creating lock. Defaults to the global coordinator. :raises tooz.coordination.LockAcquireFailed: if lock is not acquired Decorating a method like so:: @synchronized('my_lock') def foo(self, *args): ... ensures that only one process will execute the foo method at a time. Different methods can share the same lock:: @synchronized('my_lock') def foo(self, *args): ... @synchronized('my_lock') def bar(self, *args): ... This way only one of either foo or bar can be executing at a time. Lock name can be formatted using Python format string syntax:: @synchronized('{function_name}-{vol.id}-{snap[name]}') def foo(self, vol, snap): ... Available field names are: decorated function parameters and `function_name` as a decorated function name. 
""" @decorator.decorator def _synchronized(f, *a, **k): call_args = inspect.getcallargs(f, *a, **k) call_args['function_name'] = f.__name__ lock = coordinator.get_lock(lock_name.format(**call_args)) if lock: t1 = timeutils.now() t2 = None try: with lock(blocking): t2 = timeutils.now() LOG.debug('Lock "%(name)s" acquired by "%(function)s" :: ' 'waited %(wait_secs)0.3fs', {'name': lock.name, 'function': f.__name__, 'wait_secs': (t2 - t1)}) return f(*a, **k) finally: t3 = timeutils.now() if t2 is None: held_secs = "N/A" else: held_secs = "%0.3fs" % (t3 - t2) LOG.debug('Lock "%(name)s" released by "%(function)s" :: held ' '%(held_secs)s', {'name': lock.name, 'function': f.__name__, 'held_secs': held_secs}) else: return f(*a, **k) return _synchronized ././@PaxHeader0000000000000000000000000000003200000000000010210 xustar0026 mtime=1751923813.52771 masakari-19.1.0.dev18/masakari/db/0000775000175100017510000000000015033036146015606 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/db/__init__.py0000664000175100017510000000127315033036143017717 0ustar00mylesmyles# Copyright 2016 NTT Data. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
""" DB abstraction for Masakari """ from masakari.db.api import * # noqa ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/db/api.py0000664000175100017510000003653415033036143016741 0ustar00mylesmyles# Copyright 2016 NTT Data. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Defines interface for DB access. Functions in this module are imported into the masakari.db namespace. Call these functions from masakari.db namespace, not the masakari.db.api namespace. """ from oslo_db.api import DBAPI import masakari.conf CONF = masakari.conf.CONF _BACKEND_MAPPING = {'sqlalchemy': 'masakari.db.sqlalchemy.api'} IMPL = DBAPI.from_config(CONF, backend_mapping=_BACKEND_MAPPING) # The maximum value a signed INT type may have MAX_INT = 0x7FFFFFFF def get_engine(): """Returns database engine""" return IMPL.get_engine() def failover_segment_get_all_by_filters( context, filters=None, sort_keys=None, sort_dirs=None, limit=None, marker=None): """Get all failover segments that match all filters. 
:param context: context to query under :param filters: filters for the query in the form of key/value :param sort_keys: list of attributes by which results should be sorted, paired with corresponding item in sort_dirs :param sort_dirs: list of directions in which results should be sorted, paired with corresponding item in sort_keys :param limit: maximum number of items to return :param marker: the last item of the previous page, used to determine the next page of results to return :returns: list of dictionary-like objects containing all failover segments """ return IMPL.failover_segment_get_all_by_filters(context, filters=filters, sort_keys=sort_keys, sort_dirs=sort_dirs, limit=limit, marker=marker) def failover_segment_get_by_id(context, segment_id): """Get failover segment by id. :param context: context to query under :param segment_id: id of failover segment :returns: dictionary-like object containing failover segment :raises exception.FailoverSegmentNotFound if failover segment with given ID doesn't exist. """ return IMPL.failover_segment_get_by_id(context, segment_id) def failover_segment_get_by_uuid(context, segment_uuid): """Get failover segment by uuid. :param context: context to query under :param segment_uuid: uuid of failover segment :returns: dictionary-like object containing failover segment :raises exception.FailoverSegmentNotFound if failover segment with given 'segment_uuid' doesn't exist. """ return IMPL.failover_segment_get_by_uuid(context, segment_uuid) def failover_segment_get_by_name(context, name): """Get failover segment by name :param context: context: context to query under :param name: name of failover segment :returns: dictionary-like object containing failover segment :raises exception.FailoverSegmentNotFoundByName if failover segment with given 'name' doesn't exist. """ return IMPL.failover_segment_get_by_name(context, name) def failover_segment_create(context, values): """Insert failover segment to database. 
:param context: context to query under :param values: dictionary of failover segment attributes to create :returns: dictionary-like object containing created failover segment :raises exception.FailoverSegmentExists if failover segment with given name already exist. """ return IMPL.failover_segment_create(context, values) def failover_segment_update(context, segment_uuid, values): """Update failover segment by uuid. :param context: context to query under :param segment_uuid: uuid of segment to be updated :param values: dictionary of values to be updated :returns: dictionary-like object containing updated failover segment :raises exception.FailoverSegmentNotFound if failover segment with given 'segment_uuid' doesn't exist. exception.FailoverSegmentExists if failover segment with given name already exist. """ return IMPL.failover_segment_update(context, segment_uuid, values) def failover_segment_delete(context, segment_uuid): """Delete the failover segment. :param context: context to query under :param segment_uuid: uuid of segment to be deleted :raises exception.FailoverSegmentNotFound if failover segment with 'segment_uuid' doesn't exist. """ return IMPL.failover_segment_delete(context, segment_uuid) def is_failover_segment_under_recovery(context, failover_segment_id, filters=None): """Checks whether failover segment is used for processing any notification :param context: context to query under :param failover_segment_id: uuid of segment :param filters: dictionary of filters; values that are lists, tuples, sets, or frozensets cause an 'IN' test to be performed, while exact matching ('==' operator) is used for other values. :returns: Returns True if any of the host belonging to a failover segment is being used for processing any notifications which are in new, error or running status otherwise it will return False. 
""" return IMPL.is_failover_segment_under_recovery( context, failover_segment_id, filters=filters) # db apis for host def host_get_all_by_filters( context, filters=None, sort_keys=None, sort_dirs=None, limit=None, marker=None): """Get all hosts that match all filters. :param context: context to query under :param filters: filters for the query in the form of key/value :param sort_keys: list of attributes by which results should be sorted, paired with corresponding item in sort_dirs :param sort_dirs: list of directions in which results should be sorted, paired with corresponding item in sort_keys :param limit: maximum number of items to return :param marker: the last item of the previous page, used to determine the next page of results to return :returns: list of dictionary-like objects containing all hosts """ return IMPL.host_get_all_by_filters(context, filters=filters, sort_keys=sort_keys, sort_dirs=sort_dirs, limit=limit, marker=marker) def host_get_by_uuid(context, host_uuid, segment_uuid=None): """Get host information by uuid. :param context: context to query under :param host_uuid: uuid of host :param segment_uuid: uuid of failover_segment :returns: dictionary-like object containing host :raises: exception.HostNotFound if host with 'host_uuid' doesn't exist """ return IMPL.host_get_by_uuid(context, host_uuid, segment_uuid=segment_uuid) def host_get_by_id(context, host_id): """Get host information by id. :param context: context to query under :param host_id: id of host :returns: dictionary-like object containing host :raises: exception.HostNotFound if host with given ID doesn't exist """ return IMPL.host_get_by_id(context, host_id) def host_get_by_name(context, name): """Get host information by name. 
:param context: context to query under :param name: name of host :returns: dictionary-like object containing host :raises: exception.HostNotFoundByName if host with given 'name' doesn't exist """ return IMPL.host_get_by_name(context, name) def host_create(context, values): """Create a host. :param context: context to query under :param values: dictionary of host attributes to create :returns: dictionary-like object containing created host """ return IMPL.host_create(context, values) def host_update(context, host_uuid, values): """Update host information in the database. :param context: context to query under :param host_uuid: uuid of host to be updated :param values: dictionary of host attributes to be updated :returns: dictionary-like object containing updated host :raises: exception.HostNotFound if host with 'host_uuid' doesn't exist exception.HostExists if host with given 'name' already exist """ return IMPL.host_update(context, host_uuid, values) def host_delete(context, host_uuid): """Delete the host. :param context: context to query under :param host_uuid: uuid of host to be deleted :raises: exception.HostNotFound if host with 'host_uuid' doesn't exist """ return IMPL.host_delete(context, host_uuid) # notification related db apis def notifications_get_all_by_filters( context, filters=None, sort_keys=None, sort_dirs=None, limit=None, marker=None): """Get all notifications that match all filters. 
:param context: context to query under :param filters: filters for the query in the form of key/value :param sort_keys: list of attributes by which results should be sorted, paired with corresponding item in sort_dirs :param sort_dirs: list of directions in which results should be sorted, paired with corresponding item in sort_keys :param limit: maximum number of items to return :param marker: the last item of the previous page, used to determine the next page of results to return :returns: list of dictionary-like objects containing all notifications """ return IMPL.notifications_get_all_by_filters(context, filters=filters, sort_keys=sort_keys, sort_dirs=sort_dirs, limit=limit, marker=marker) def notification_get_by_uuid(context, notification_uuid): """Get notification information by uuid. :param context: context to query under :param notification_uuid: uuid of notification :returns: dictionary-like object containing notification :raises: exception.NotificationNotFound if notification with given 'notification_uuid' doesn't exist """ return IMPL.notification_get_by_uuid(context, notification_uuid) def notification_get_by_id(context, notification_id): """Get notification information by id. :param context: context to query under :param notification_id: id of notification :returns: dictionary-like object containing notification :raises: exception.NotificationNotFound if notification with given ID doesn't exist """ return IMPL.notification_get_by_id(context, notification_id) def notification_create(context, values): """Create a notification. :param context: context to query under :param values: dictionary of notification attributes to create :returns: dictionary-like object containing created notification """ return IMPL.notification_create(context, values) def notification_update(context, notification_uuid, values): """Update notification information in the database. 
:param context: context to query under :param notification_uuid: uuid of notification to be updated :param values: dictionary of notification attributes to be updated :returns: dictionary-like object containing updated notification :raises: exception.NotificationNotFound if notification with given 'notification_uuid' doesn't exist """ return IMPL.notification_update(context, notification_uuid, values) def notification_delete(context, notification_uuid): """Delete the notification. :param context: context to query under :param notification_uuid: uuid of notification to be deleted :raises: exception.NotificationNotFound if notification with given 'notification_uuid' doesn't exist """ return IMPL.notification_delete(context, notification_uuid) # vmoves related db apis def vmoves_get_all_by_filters( context, filters=None, sort_keys=None, sort_dirs=None, limit=None, marker=None): """Get all vm moves that match the filters. :param context: context to query under :param filters: filters for the query in the form of key/value :param sort_keys: list of attributes by which results should be sorted, paired with corresponding item in sort_dirs :param sort_dirs: list of directions in which results should be sorted, paired with corresponding item in sort_keys :param limit: maximum number of items to return :param marker: the last item of the previous page, used to determine the next page of results to return :returns: list of dictionary-like objects containing all vm moves """ return IMPL.vmoves_get_all_by_filters(context, filters=filters, sort_keys=sort_keys, sort_dirs=sort_dirs, limit=limit, marker=marker) def vmove_get_by_uuid(context, vmove_uuid): """Get one vm move information by uuid. 
:param context: context to query under :param uuid: uuid of the vm move :returns: dictionary-like object containing one vm move :raises: exception.VMoveNotFound if the vm move with given 'uuid' doesn't exist """ return IMPL.vmove_get_by_uuid(context, vmove_uuid) def vmove_create(context, values): """Create one vm move. :param context: context to query under :param values: dictionary of the vm move attributes to create :returns: dictionary-like object containing created one vm move """ return IMPL.vmove_create(context, values) def vmove_update(context, uuid, values): """Update one vm move information in the database. :param context: context to query under :param uuid: uuid of the vm move to be updated :param values: dictionary of the vm move attributes to be updated :returns: dictionary-like object containing updated one vm move :raises: exception.VMoveNotFound if the vm move with given 'uuid' doesn't exist """ return IMPL.vmove_update(context, uuid, values) def vmove_delete(context, uuid): """Delete one vm move. :param context: context to query under :param uuid: uuid of the vm move to be delete :raises exception.VMoveNotFound if the vm move not exist. """ return IMPL.vmove_delete(context, uuid) def purge_deleted_rows(context, age_in_days, max_rows): """Purge the soft deleted rows. :param context: context to query under :param age_in_days: Purge deleted rows older than age in days :param max_rows: Limit number of records to delete """ return IMPL.purge_deleted_rows(context, age_in_days, max_rows) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/db/migration.py0000664000175100017510000000155415033036143020153 0ustar00mylesmyles# Copyright 2016 NTT Data. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Database setup and migration commands.""" from masakari.db.sqlalchemy import migration IMPL = migration def db_sync(version=None): """Migrate the database to `version` or the most recent version.""" return IMPL.db_sync(version=version) ././@PaxHeader0000000000000000000000000000003200000000000010210 xustar0026 mtime=1751923813.52771 masakari-19.1.0.dev18/masakari/db/sqlalchemy/0000775000175100017510000000000015033036146017750 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/db/sqlalchemy/__init__.py0000664000175100017510000000000015033036143022044 0ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/db/sqlalchemy/alembic.ini0000664000175100017510000000117615033036143022047 0ustar00mylesmyles[alembic] script_location = %(here)s/migrations prepend_sys_path = . 
version_path_separator = os sqlalchemy.url = sqlite:///masakari.db # Logging configuration [loggers] keys = root,sqlalchemy,alembic [handlers] keys = console [formatters] keys = generic [logger_root] level = WARN handlers = console qualname = [logger_sqlalchemy] level = WARN handlers = qualname = sqlalchemy.engine [logger_alembic] level = INFO handlers = qualname = alembic [handler_console] class = StreamHandler args = (sys.stderr,) level = NOTSET formatter = generic [formatter_generic] format = %(levelname)-5.5s [%(name)s] %(message)s datefmt = %H:%M:%S ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/db/sqlalchemy/api.py0000664000175100017510000006630415033036143021101 0ustar00mylesmyles# Copyright 2016 NTT Data. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
"""Implementation of SQLAlchemy backend.""" import datetime import sys from oslo_db import api as oslo_db_api from oslo_db import exception as db_exc from oslo_db.sqlalchemy import enginefacade from oslo_db.sqlalchemy import utils as sqlalchemyutils from oslo_log import log as logging from oslo_utils import timeutils import sqlalchemy as sa from sqlalchemy.ext import compiler from sqlalchemy import MetaData from sqlalchemy import orm from sqlalchemy import sql import sqlalchemy.sql as sa_sql from sqlalchemy.sql import func import masakari.conf from masakari.db.sqlalchemy import models from masakari import exception from masakari.i18n import _ LOG = logging.getLogger(__name__) CONF = masakari.conf.CONF main_context_manager = enginefacade.transaction_context() def _get_db_conf(conf_group, connection=None): kw = dict(conf_group.items()) if connection is not None: kw['connection'] = connection return kw def _context_manager_from_context(context): if context: try: return context.db_connection except AttributeError: pass def get_backend(): """The backend is this module itself.""" return sys.modules[__name__] def configure(conf): main_context_manager.configure(**_get_db_conf(conf.database)) def get_engine(use_slave=False, context=None): """Get a database engine object. :param use_slave: Whether to use the slave connection :param context: The request context that can contain a context manager """ ctxt_mgr = _context_manager_from_context(context) or main_context_manager return ctxt_mgr.get_legacy_facade().get_engine(use_slave=use_slave) def create_context_manager(connection=None): """Create a database context manager object. : param connection: The database connection string """ ctxt_mgr = enginefacade.transaction_context() ctxt_mgr.configure(**_get_db_conf(CONF.database, connection=connection)) return ctxt_mgr def model_query(context, model, args=None, read_deleted=None): """Query helper that accounts for context's `read_deleted` field. 
:param context: MasakariContext of the query. :param model: Model to query. Must be a subclass of ModelBase. :param args: Arguments to query. If None - model is used. :param read_deleted: If not None, overrides context's read_deleted field. Permitted values are 'no', which does not return deleted values; 'only', which only returns deleted values; and 'yes', which does not filter deleted values. """ if read_deleted is None: read_deleted = context.read_deleted query_kwargs = {} if 'no' == read_deleted: query_kwargs['deleted'] = False elif 'only' == read_deleted: query_kwargs['deleted'] = True elif 'yes' == read_deleted: pass else: raise ValueError(_("Unrecognized read_deleted value '%s'") % read_deleted) query = sqlalchemyutils.model_query( model, context.session, args, **query_kwargs) return query def _process_sort_params(sort_keys, sort_dirs, default_keys=['created_at', 'id'], default_dir='desc'): """Process the sort parameters to include default keys. Creates a list of sort keys and a list of sort directions. Adds the default keys to the end of the list if they are not already included. When adding the default keys to the sort keys list, the associated direction is: 1) The first element in the 'sort_dirs' list (if specified), else 2) 'default_dir' value (Note that 'asc' is the default value since this is the default in sqlalchemy.utils.paginate_query) :param sort_keys: List of sort keys to include in the processed list :param sort_dirs: List of sort directions to include in the processed list :param default_keys: List of sort keys that need to be included in the processed list, they are added at the end of the list if not already specified. 
:param default_dir: Sort direction associated with each of the default keys that are not supplied, used when they are added to the processed list :returns: list of sort keys, list of sort directions :raise exception.InvalidInput: If more sort directions than sort keys are specified or if an invalid sort direction is specified """ # Determine direction to use for when adding default keys default_dir_value = default_dir if sort_dirs and len(sort_dirs) != 0: default_dir_value = sort_dirs[0] # Create list of keys (do not modify the input list) result_keys = [] if sort_keys: result_keys = list(sort_keys) # If a list of directions is not provided, use the default sort direction # for all provided keys if sort_dirs: result_dirs = [] # Verify sort direction for sort_dir in sort_dirs: if sort_dir not in ('asc', 'desc'): msg = _("Unknown sort direction, must be 'asc' or 'desc'") raise exception.InvalidInput(reason=msg) result_dirs.append(sort_dir) else: result_dirs = [default_dir_value for _sort_key in result_keys] # Ensure that the key and direction length match while len(result_dirs) < len(result_keys): result_dirs.append(default_dir_value) # Unless more direction are specified, which is an error if len(result_dirs) > len(result_keys): msg = _("Sort direction size exceeds sort key size") raise exception.InvalidInput(reason=msg) # Ensure defaults are included for key in default_keys: if key not in result_keys: result_keys.append(key) result_dirs.append(default_dir_value) return result_keys, result_dirs @oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True) @main_context_manager.reader def failover_segment_get_all_by_filters( context, filters=None, sort_keys=None, sort_dirs=None, limit=None, marker=None): # NOTE(Dinesh_Bhor): If the limit is 0 there is no point in even going # to the database since nothing is going to be returned anyway. 
if limit == 0: return [] sort_keys, sort_dirs = _process_sort_params(sort_keys, sort_dirs) filters = filters or {} query = model_query(context, models.FailoverSegment) if 'recovery_method' in filters: query = query.filter(models.FailoverSegment.recovery_method == filters[ 'recovery_method']) if 'service_type' in filters: query = query.filter(models.FailoverSegment.service_type == filters[ 'service_type']) if 'enabled' in filters: query = query.filter(models.FailoverSegment.enabled == filters[ 'enabled']) marker_row = None if marker is not None: marker_row = model_query(context, models.FailoverSegment ).filter_by(id=marker).first() if not marker_row: raise exception.MarkerNotFound(marker=marker) try: query = sqlalchemyutils.paginate_query(query, models.FailoverSegment, limit, sort_keys, marker=marker_row, sort_dirs=sort_dirs) except db_exc.InvalidSortKey as e: raise exception.InvalidSortKey(e) return query.all() @oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True) @main_context_manager.reader def failover_segment_get_by_id(context, segment_id): query = model_query(context, models.FailoverSegment).filter_by(id=segment_id) result = query.first() if not result: raise exception.FailoverSegmentNotFound(id=segment_id) return result @oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True) @main_context_manager.reader def failover_segment_get_by_uuid(context, segment_uuid): return _failover_segment_get_by_uuid(context, segment_uuid) def _failover_segment_get_by_uuid(context, segment_uuid): query = model_query(context, models.FailoverSegment).filter_by(uuid=segment_uuid) result = query.first() if not result: raise exception.FailoverSegmentNotFound(id=segment_uuid) return result @oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True) @main_context_manager.reader def failover_segment_get_by_name(context, name): query = model_query(context, models.FailoverSegment).filter_by(name=name) result = query.first() if not result: raise 
exception.FailoverSegmentNotFoundByName(segment_name=name) return result @oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True) @main_context_manager.writer def failover_segment_create(context, values): segment = models.FailoverSegment() segment.update(values) try: segment.save(session=context.session) except db_exc.DBDuplicateEntry: raise exception.FailoverSegmentExists(name=segment.name) return _failover_segment_get_by_uuid(context, segment.uuid) @oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True) @main_context_manager.writer def failover_segment_update(context, segment_uuid, values): segment = _failover_segment_get_by_uuid(context, segment_uuid) segment.update(values) try: segment.save(session=context.session) except db_exc.DBDuplicateEntry: raise exception.FailoverSegmentExists(name=values.get('name')) return _failover_segment_get_by_uuid(context, segment.uuid) @oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True) @main_context_manager.writer def failover_segment_delete(context, segment_uuid): count = model_query(context, models.FailoverSegment ).filter_by(uuid=segment_uuid ).soft_delete(synchronize_session=False) if count == 0: raise exception.FailoverSegmentNotFound(id=segment_uuid) model_query(context, models.Host).filter_by( failover_segment_id=segment_uuid).soft_delete( synchronize_session=False) @oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True) @main_context_manager.reader def is_failover_segment_under_recovery(context, failover_segment_id, filters=None): filters = filters or {} # get all hosts against the failover_segment inner_select = model_query( context, models.Host, (models.Host.uuid,)).filter( models.Host.failover_segment_id == failover_segment_id) # check if any host has notification status as new, running or error query = model_query(context, models.Notification, (func.count(models.Notification.id),)) if 'status' in filters: status = filters['status'] if isinstance(status, (list, tuple, set, 
frozenset)): column_attr = getattr(models.Notification, 'status') query = query.filter(column_attr.in_(status)) else: query = query.filter(models.Notification.status == status) query = query.filter( models.Notification.source_host_uuid.in_(inner_select.subquery())) return query.first()[0] > 0 # db apis for host @oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True) @main_context_manager.reader def host_get_all_by_filters( context, filters=None, sort_keys=None, sort_dirs=None, limit=None, marker=None): # NOTE(Dinesh_Bhor): If the limit is 0 there is no point in even going # to the database since nothing is going to be returned anyway. if limit == 0: return [] sort_keys, sort_dirs = _process_sort_params(sort_keys, sort_dirs) filters = filters or {} query = model_query( context, models.Host, ).options( orm.joinedload(models.Host.failover_segment), ) if 'failover_segment_id' in filters: query = query.filter(models.Host.failover_segment_id == filters[ 'failover_segment_id']) if 'type' in filters: query = query.filter(models.Host.type == filters['type']) if 'on_maintenance' in filters: query = query.filter(models.Host.on_maintenance == filters[ 'on_maintenance']) if 'reserved' in filters: query = query.filter(models.Host.reserved == filters['reserved']) marker_row = None if marker is not None: marker_row = model_query(context, models.Host ).filter_by(id=marker).first() if not marker_row: raise exception.MarkerNotFound(marker=marker) try: query = sqlalchemyutils.paginate_query(query, models.Host, limit, sort_keys, marker=marker_row, sort_dirs=sort_dirs) except db_exc.InvalidSortKey as e: raise exception.InvalidSortKey(e) return query.all() @oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True) @main_context_manager.reader def host_get_by_uuid(context, host_uuid, segment_uuid=None): return _host_get_by_uuid(context, host_uuid, segment_uuid=segment_uuid) def _host_get_by_uuid(context, host_uuid, segment_uuid=None): query = model_query( context, 
models.Host, ).filter_by( uuid=host_uuid, ).options( orm.joinedload(models.Host.failover_segment), ) if segment_uuid: query = query.filter_by(failover_segment_id=segment_uuid) result = query.first() if not result: if segment_uuid: raise exception.HostNotFoundUnderFailoverSegment( host_uuid=host_uuid, segment_uuid=segment_uuid) else: raise exception.HostNotFound(id=host_uuid) return result @oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True) @main_context_manager.reader def host_get_by_id(context, host_id): query = model_query( context, models.Host, ).filter_by( id=host_id, ).options( orm.joinedload(models.Host.failover_segment), ) result = query.first() if not result: raise exception.HostNotFound(id=host_id) return result @oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True) @main_context_manager.reader def host_get_by_name(context, name): query = model_query( context, models.Host, ).filter_by( name=name, ).options( orm.joinedload(models.Host.failover_segment), ) result = query.first() if not result: raise exception.HostNotFoundByName(host_name=name) return result @oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True) @main_context_manager.writer def host_create(context, values): host = models.Host() host.update(values) try: host.save(session=context.session) except db_exc.DBDuplicateEntry: raise exception.HostExists(name=host.name) return _host_get_by_uuid(context, host.uuid) @oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True) @main_context_manager.writer def host_update(context, host_uuid, values): host = _host_get_by_uuid(context, host_uuid) host.update(values) try: host.save(session=context.session) except db_exc.DBDuplicateEntry: raise exception.HostExists(name=values.get('name')) return _host_get_by_uuid(context, host.uuid) @oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True) @main_context_manager.writer def host_delete(context, host_uuid): count = model_query(context, models.Host 
).filter_by(uuid=host_uuid ).soft_delete(synchronize_session=False) if count == 0: raise exception.HostNotFound(id=host_uuid) # db apis for notifications @oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True) @main_context_manager.reader def notifications_get_all_by_filters( context, filters=None, sort_keys=None, sort_dirs=None, limit=None, marker=None): # NOTE(Dinesh_Bhor): If the limit is 0 there is no point in even going # to the database since nothing is going to be returned anyway. if limit == 0: return [] sort_keys, sort_dirs = _process_sort_params(sort_keys, sort_dirs) filters = filters or {} query = model_query(context, models.Notification) if 'source_host_uuid' in filters: query = query.filter(models.Notification.source_host_uuid == filters[ 'source_host_uuid']) if 'failover_segment_uuid' in filters: query = query.filter( models.Notification.failover_segment_uuid == filters[ 'failover_segment_uuid']) if 'type' in filters: query = query.filter(models.Notification.type == filters['type']) if 'status' in filters: status = filters['status'] if isinstance(status, (list, tuple, set, frozenset)): column_attr = getattr(models.Notification, 'status') query = query.filter(column_attr.in_(status)) else: query = query.filter(models.Notification.status == status) if 'generated-since' in filters: generated_since = timeutils.normalize_time(filters['generated-since']) query = query.filter( models.Notification.generated_time >= generated_since) marker_row = None if marker is not None: marker_row = model_query(context, models.Notification ).filter_by(id=marker).first() if not marker_row: raise exception.MarkerNotFound(marker=marker) try: query = sqlalchemyutils.paginate_query(query, models.Notification, limit, sort_keys, marker=marker_row, sort_dirs=sort_dirs) except db_exc.InvalidSortKey as err: raise exception.InvalidSortKey(err) return query.all() @oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True) @main_context_manager.reader def 
notification_get_by_uuid(context, notification_uuid): return _notification_get_by_uuid(context, notification_uuid) def _notification_get_by_uuid(context, notification_uuid): query = model_query(context, models.Notification ).filter_by(notification_uuid=notification_uuid ) result = query.first() if not result: raise exception.NotificationNotFound(id=notification_uuid) return result @oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True) @main_context_manager.reader def notification_get_by_id(context, notification_id): query = model_query(context, models.Notification ).filter_by(id=notification_id ) result = query.first() if not result: raise exception.NotificationNotFound(id=notification_id) return result @oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True) @main_context_manager.writer def notification_create(context, values): notification = models.Notification() notification.update(values) notification.save(session=context.session) return _notification_get_by_uuid(context, notification.notification_uuid) @oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True) @main_context_manager.writer def notification_update(context, notification_uuid, values): notification = _notification_get_by_uuid(context, notification_uuid) notification.update(values) notification.save(session=context.session) return _notification_get_by_uuid(context, notification.notification_uuid) @oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True) @main_context_manager.writer def notification_delete(context, notification_uuid): count = model_query(context, models.Notification ).filter_by(notification_uuid=notification_uuid ).soft_delete(synchronize_session=False) if count == 0: raise exception.NotificationNotFound(id=notification_uuid) model_query(context, models.VMove).filter_by( notification_uuid=notification_uuid).soft_delete( synchronize_session=False) # db apis for vm moves @oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True) 
@main_context_manager.reader def vmoves_get_all_by_filters( context, filters=None, sort_keys=None, sort_dirs=None, limit=None, marker=None): if limit == 0: return [] sort_keys, sort_dirs = _process_sort_params(sort_keys, sort_dirs) filters = filters or {} query = model_query(context, models.VMove) if 'notification_uuid' in filters: query = query.filter(models.VMove.notification_uuid == filters[ 'notification_uuid']) if 'type' in filters: query = query.filter(models.VMove.type == filters[ 'type']) if 'status' in filters: status = filters['status'] if isinstance(status, (list, tuple, set, frozenset)): column_attr = getattr(models.VMove, 'status') query = query.filter(column_attr.in_(status)) else: query = query.filter(models.VMove.status == status) marker_row = None if marker is not None: marker_row = model_query(context, models.VMove ).filter_by(id=marker).first() if not marker_row: raise exception.MarkerNotFound(marker=marker) try: query = sqlalchemyutils.paginate_query(query, models.VMove, limit, sort_keys, marker=marker_row, sort_dirs=sort_dirs) except db_exc.InvalidSortKey as err: raise exception.InvalidSortKey(err) return query.all() @oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True) @main_context_manager.reader def vmove_get_by_uuid(context, uuid): return _vmove_get_by_uuid(context, uuid) def _vmove_get_by_uuid(context, uuid): query = model_query(context, models.VMove).filter_by(uuid=uuid) result = query.first() if not result: raise exception.VMoveNotFound(id=uuid) return result @oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True) @main_context_manager.writer def vmove_create(context, values): vm_move = models.VMove() vm_move.update(values) vm_move.save(session=context.session) return _vmove_get_by_uuid(context, vm_move.uuid) @oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True) @main_context_manager.writer def vmove_update(context, uuid, values): vm_move = _vmove_get_by_uuid(context, uuid) vm_move.update(values) 
vm_move.save(session=context.session) return _vmove_get_by_uuid(context, vm_move.uuid) @oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True) @main_context_manager.writer def vmove_delete(context, vmove_uuid): count = model_query(context, models.VMove ).filter_by(uuid=vmove_uuid ).soft_delete(synchronize_session=False) if count == 0: raise exception.VMoveNotFound(id=vmove_uuid) class DeleteFromSelect(sa_sql.expression.UpdateBase): inherit_cache = False def __init__(self, table, select, column): self.table = table self.select = select self.column = column # NOTE(pooja_jadhav): MySQL doesn't yet support subquery with # 'LIMIT & IN/ALL/ANY/SOME' We need work around this with nesting select. @compiler.compiles(DeleteFromSelect) def visit_delete_from_select(element, compiler, **kw): return "DELETE FROM %s WHERE %s in (SELECT T1.%s FROM (%s) as T1)" % ( compiler.process(element.table, asfrom=True), compiler.process(element.column), element.column.name, compiler.process(element.select)) @oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True) @main_context_manager.writer def purge_deleted_rows(context, age_in_days, max_rows): """Purges soft deleted rows Deleted rows get purged from hosts and segment tables based on deleted_at column. As notifications table doesn't delete any of the notification records so rows get purged from notifications based on last updated_at and status column. 
""" engine = get_engine() metadata = MetaData() metadata.reflect(engine) deleted_age = timeutils.utcnow() - datetime.timedelta(days=age_in_days) total_rows_purged = 0 for table in reversed(metadata.sorted_tables): if 'deleted' not in table.columns.keys(): continue LOG.info('Purging deleted rows older than %(age_in_days)d day(s) ' 'from table %(tbl)s', {'age_in_days': age_in_days, 'tbl': table}) column = table.c.id updated_at_column = table.c.updated_at deleted_at_column = table.c.deleted_at if table.name == 'notifications': status_column = table.c.status query_delete = sql.select(column).where( sa.and_( updated_at_column < deleted_age, sa.or_( status_column == 'finished', status_column == 'failed', status_column == 'ignored', ), ), ).order_by(status_column) else: query_delete = sql.select( column, ).where( deleted_at_column < deleted_age, ).order_by( deleted_at_column, ) if max_rows > 0: query_delete = query_delete.limit(max_rows - total_rows_purged) delete_statement = DeleteFromSelect(table, query_delete, column) with engine.connect() as conn, conn.begin(): result = conn.execute(delete_statement) rows = result.rowcount LOG.info('Deleted %(rows)d row(s) from table %(tbl)s', {'rows': rows, 'tbl': table}) total_rows_purged += rows if max_rows > 0 and total_rows_purged == max_rows: break LOG.info('Total deleted rows are %(rows)d', {'rows': total_rows_purged}) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/db/sqlalchemy/migration.py0000664000175100017510000001612515033036143022315 0ustar00mylesmyles# Copyright 2016 NTT Data. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import os from alembic import command as alembic_api from alembic import config as alembic_config from alembic import migration as alembic_migration from oslo_config import cfg from oslo_db import options from oslo_log import log as logging import sqlalchemy as sa import masakari.conf from masakari.db import api as db_api from masakari.engine import driver from masakari import exception from masakari.i18n import _ options.set_defaults(cfg.CONF) LOG = logging.getLogger(__name__) CONF = masakari.conf.CONF def _migrate_legacy_database(engine, connection, config): """Check if database is a legacy sqlalchemy-migrate-managed database. If it is, migrate it by "stamping" the initial alembic schema. """ # If the database doesn't have the sqlalchemy-migrate legacy migration # table, we don't have anything to do if not sa.inspect(engine).has_table('migrate_version'): return # Likewise, if we've already migrated to alembic, we don't have anything to # do context = alembic_migration.MigrationContext.configure( connection, opts={'version_table': 'masakari_alembic_version'}) if context.get_current_revision(): return # We have legacy migrations but no alembic migration. Stamp (dummy apply) # the initial alembic migration(s). There may be one or two to apply # depending on what's already applied. 
# Get the currently applied version of the legacy migrations using table # reflection to avoid a dependency on sqlalchemy-migrate # https://opendev.org/x/sqlalchemy-migrate/src/commit/5d1f322542cd8eb42381612765be4ed9ca8105ec/migrate/versioning/schema.py#L175-L179 meta = sa.MetaData() table = sa.Table('migrate_version', meta, autoload_with=engine) with engine.connect() as conn: version = conn.execute(sa.select(table.c.version)).scalar() # If the user is requesting a skip-level upgrade from a very old version, # we can't help them since we don't have alembic-versions of those old # migrations :( if version < 7: reason = _( 'Your database is at version %03d; we only support upgrading ' 'from version 007 or later. Please upgrade your database using ' 'an earlier release of Masakari and then return here.' ) raise exception.InvalidInput(reason % version) elif version > 8: if os.getenv('FORCE_MASAKARI_DB_SYNC') is None: reason = _( 'Your database is at version %03d; we do not recognise this ' 'version and it is likely you are carrying out-of-tree ' 'migrations. You can still upgrade but we cannot guarantee ' 'things will work as expected. ' 'If you wish to continue, set the FORCE_MASAKARI_DB_SYNC ' 'environment variable to any value and retry.' ) raise exception.InvalidInput(reason % version) else: msg = _( 'Your database is at version %03d; we do not recognise this ' 'version but the FORCE_MASAKARI_DB_SYNC environment variable ' 'is set so we are continuing. Things may break. 
' 'You have been warned!', ) LOG.warning(msg, version) if version == 7: alembic_init_version = '8f848eb45d03' else: # 8 or greater (out-of-tree) alembic_init_version = '8bdf5929c5a6' LOG.info( 'The database is still under sqlalchemy-migrate control; ' 'fake applying the initial alembic migration' ) alembic_api.stamp(config, alembic_init_version) def _find_alembic_conf(): """Get the project's alembic configuration :returns: An instance of ``alembic.config.Config`` """ path = os.path.join( os.path.abspath(os.path.dirname(__file__)), 'alembic.ini', ) config = alembic_config.Config(os.path.abspath(path)) # We don't want to use the logger configuration from the file, which is # only really intended for the CLI # https://stackoverflow.com/a/42691781/613428 config.attributes['configure_logger'] = False return config def _upgrade_alembic(engine, config, version): # re-use the connection rather than creating a new one with engine.begin() as connection: config.attributes['connection'] = connection _migrate_legacy_database(engine, connection, config) alembic_api.upgrade(config, version or 'head') def db_sync(version=None, engine=None): """Migrate the database to `version` or the most recent version.""" # If the user requested a specific version, check if it's an integer: if # so, we're almost certainly in sqlalchemy-migrate land and won't support # that if version is not None and version.isdigit(): raise ValueError( 'You requested an sqlalchemy-migrate database version; this is ' 'no longer supported' ) if engine is None: engine = db_api.get_engine() config = _find_alembic_conf() # Discard the URL encoded in alembic.ini in favour of the URL configured # for the engine by the database fixtures, casting from # 'sqlalchemy.engine.url.URL' to str in the process. This returns a # RFC-1738 quoted URL, which means that a password like "foo@" will be # turned into "foo%40". 
This in turns causes a problem for # set_main_option() because that uses ConfigParser.set, which (by design) # uses *python* interpolation to write the string out ... where "%" is the # special python interpolation character! Avoid this mismatch by quoting # all %'s for the set below. engine_url = str(engine.url).replace('%', '%%') config.set_main_option('sqlalchemy.url', str(engine_url)) # First upgrade ourselves, followed by Taskflow LOG.info('Applying migration(s)') _upgrade_alembic(engine, config, version) # Get the taskflow driver configured, default is 'taskflow_driver', # to load persistence tables to store progress details. taskflow_driver = driver.load_masakari_driver(CONF.notification_driver) if CONF.taskflow.connection: taskflow_driver.upgrade_backend(CONF.taskflow.connection) LOG.info('Migration(s) applied') def db_version(): """Get database version.""" engine = db_api.get_engine() with engine.connect() as connection: m_context = alembic_migration.MigrationContext.configure( connection, opts={'version_table': 'masakari_alembic_version'} ) return m_context.get_current_revision() ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1751923813.5287101 masakari-19.1.0.dev18/masakari/db/sqlalchemy/migrations/0000775000175100017510000000000015033036146022124 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/db/sqlalchemy/migrations/README.rst0000664000175100017510000000101215033036143023602 0ustar00mylesmylesDatabase migrations =================== This directory contains migrations for the database. These are implemented using `alembic`__, a lightweight database migration tool designed for usage with `SQLAlchemy`__. The best place to start understanding Alembic is with its own `tutorial`__. You can also play around with the :command:`alembic` command:: $ alembic --help .. __: https://alembic.sqlalchemy.org/en/latest/ .. 
__: https://www.sqlalchemy.org/ .. __: https://alembic.sqlalchemy.org/en/latest/tutorial.html ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/db/sqlalchemy/migrations/env.py0000664000175100017510000000707315033036143023272 0ustar00mylesmyles# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from logging.config import fileConfig from alembic import context from sqlalchemy import engine_from_config from sqlalchemy import pool from masakari.db.sqlalchemy import models # this is the Alembic Config object, which provides # access to the values within the .ini file in use. config = context.config # Interpret the config file for Python logging. # This line sets up loggers basically. if config.attributes.get('configure_logger', True): fileConfig(config.config_file_name) # this is the MetaData object for the various models in the database target_metadata = models.BASE.metadata def run_migrations_offline() -> None: """Run migrations in 'offline' mode. This configures the context with just a URL and not an Engine, though an Engine is acceptable here as well. By skipping the Engine creation we don't even need a DBAPI to be available. Calls to context.execute() here emit the given string to the script output. 
""" url = config.get_main_option("sqlalchemy.url") context.configure( url=url, target_metadata=target_metadata, literal_binds=True, dialect_opts={"paramstyle": "named"}, # use a unique version table name to avoid conflicts with taskflow version_table='masakari_alembic_version', ) with context.begin_transaction(): context.run_migrations() def run_migrations_online() -> None: """Run migrations in 'online' mode. In this scenario we need to create an Engine and associate a connection with the context. This is modified from the default based on the below, since we want to share an engine when unit testing so in-memory database testing actually works. https://alembic.sqlalchemy.org/en/latest/cookbook.html#connection-sharing """ connectable = config.attributes.get('connection', None) if connectable is None: # only create Engine if we don't have a Connection from the outside connectable = engine_from_config( config.get_section(config.config_ini_section), prefix="sqlalchemy.", poolclass=pool.NullPool, ) with connectable.connect() as connection: context.configure( connection=connection, target_metadata=target_metadata, render_as_batch=True, # use a unique version table name to avoid conflicts with # taskflow version_table='masakari_alembic_version', ) with context.begin_transaction(): context.run_migrations() else: context.configure( connection=connectable, target_metadata=target_metadata, render_as_batch=True, # use a unique version table name to avoid conflicts with taskflow version_table='masakari_alembic_version', ) with context.begin_transaction(): context.run_migrations() if context.is_offline_mode(): run_migrations_offline() else: run_migrations_online() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/db/sqlalchemy/migrations/script.py.mako0000664000175100017510000000173015033036143024726 0ustar00mylesmyles# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use 
this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """${message} Revision ID: ${up_revision} Revises: ${down_revision | comma,n} Create Date: ${create_date} """ from alembic import op import sqlalchemy as sa ${imports if imports else ""} # revision identifiers, used by Alembic. revision = ${repr(up_revision)} down_revision = ${repr(down_revision)} branch_labels = ${repr(branch_labels)} depends_on = ${repr(depends_on)} def upgrade() -> None: ${upgrades if upgrades else "pass"} ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1751923813.5287101 masakari-19.1.0.dev18/masakari/db/sqlalchemy/migrations/versions/0000775000175100017510000000000015033036146023774 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000021200000000000010210 xustar00116 path=masakari-19.1.0.dev18/masakari/db/sqlalchemy/migrations/versions/13adff5efb9a_extend_notification_table.py 22 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/db/sqlalchemy/migrations/versions/13adff5efb9a_extend_notification_ta0000664000175100017510000000213015033036143032551 0ustar00mylesmyles# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations # under the License. """Add vm moves table Revision ID: 13adff5efb9a Revises: 8bdf5929c5a6 Create Date: 2025-05-13 14:10:42.220612 """ from alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic. revision = '13adff5efb9a' down_revision = '8bdf5929c5a6' branch_labels = None depends_on = None def upgrade() -> None: with op.batch_alter_table('notifications') as batch_op: batch_op.add_column(sa.Column('failover_segment_uuid', sa.String(length=36))) batch_op.add_column(sa.Column('message', sa.Text())) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/db/sqlalchemy/migrations/versions/8bdf5929c5a6_add_vm_moves_table.py0000664000175100017510000000423615033036143032075 0ustar00mylesmyles# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Add vm moves table Revision ID: 8bdf5929c5a6 Revises: 8f848eb45d03 Create Date: 2023-07-13 12:13:42.240598 """ from alembic import op from oslo_db.sqlalchemy import types as oslo_db_types import sqlalchemy as sa # revision identifiers, used by Alembic. 
revision = '8bdf5929c5a6' down_revision = '8f848eb45d03' branch_labels = None depends_on = None def upgrade() -> None: op.create_table( 'vmoves', sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), sa.Column('deleted_at', sa.DateTime(), nullable=True), sa.Column( 'deleted', oslo_db_types.SoftDeleteInteger(), nullable=True, ), sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), sa.Column('uuid', sa.String(length=36), nullable=False), sa.Column('notification_uuid', sa.String(length=36), nullable=False), sa.Column('instance_uuid', sa.String(length=36), nullable=False), sa.Column('instance_name', sa.String(length=255), nullable=False), sa.Column('source_host', sa.String(length=255), nullable=True), sa.Column('dest_host', sa.String(length=255), nullable=True), sa.Column('start_time', sa.DateTime(), nullable=True), sa.Column('end_time', sa.DateTime(), nullable=True), sa.Column('type', sa.String(length=36), nullable=True), sa.Column('status', sa.String(length=255), nullable=True), sa.Column('message', sa.Text(), nullable=True), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('uuid', name='uniq_vmove0uuid'), ) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/db/sqlalchemy/migrations/versions/8f848eb45d03_initial_revision.py0000664000175100017510000001133315033036143031543 0ustar00mylesmyles# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
"""Initial revision Revision ID: 8f848eb45d03 Revises: Create Date: 2023-07-13 12:00:07.851502 """ from alembic import op from oslo_db.sqlalchemy import types as oslo_db_types import sqlalchemy as sa # revision identifiers, used by Alembic. revision = '8f848eb45d03' down_revision = None branch_labels = None depends_on = None def upgrade() -> None: op.create_table( 'failover_segments', sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), sa.Column('deleted_at', sa.DateTime(), nullable=True), sa.Column( 'deleted', oslo_db_types.SoftDeleteInteger(), nullable=True, ), sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), sa.Column('uuid', sa.String(length=36), nullable=False), sa.Column('name', sa.String(length=255), nullable=False), sa.Column('service_type', sa.String(length=255), nullable=False), sa.Column('description', sa.Text(), nullable=True), sa.Column( 'recovery_method', sa.Enum( 'auto', 'reserved_host', 'auto_priority', 'rh_priority', name='recovery_methods', ), nullable=False, ), sa.Column('enabled', sa.Boolean(), nullable=True), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint( 'name', 'deleted', name='uniq_segment0name0deleted' ), sa.UniqueConstraint('uuid', name='uniq_segments0uuid'), ) op.create_index( 'segments_service_type_idx', 'failover_segments', ['service_type'], unique=False, ) op.create_table( 'notifications', sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), sa.Column('deleted_at', sa.DateTime(), nullable=True), sa.Column( 'deleted', oslo_db_types.SoftDeleteInteger(), nullable=True, ), sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), sa.Column('notification_uuid', sa.String(length=36), nullable=False), sa.Column('generated_time', sa.DateTime(), nullable=False), sa.Column('source_host_uuid', sa.String(length=36), nullable=False), sa.Column('type', sa.String(length=36), nullable=False), 
sa.Column('payload', sa.Text(), nullable=True), sa.Column( 'status', sa.Enum( 'new', 'running', 'error', 'failed', 'ignored', 'finished', name='notification_status', ), nullable=False, ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint( 'notification_uuid', name='uniq_notification0uuid' ), ) op.create_table( 'hosts', sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), sa.Column('deleted_at', sa.DateTime(), nullable=True), sa.Column( 'deleted', oslo_db_types.SoftDeleteInteger(), nullable=True, ), sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), sa.Column('uuid', sa.String(length=36), nullable=False), sa.Column('name', sa.String(length=255), nullable=False), sa.Column('reserved', sa.Boolean(), nullable=True), sa.Column('type', sa.String(length=255), nullable=False), sa.Column('control_attributes', sa.Text(), nullable=False), sa.Column('failover_segment_id', sa.String(length=36), nullable=False), sa.Column('on_maintenance', sa.Boolean(), nullable=True), sa.ForeignKeyConstraint( ['failover_segment_id'], ['failover_segments.uuid'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('name', 'deleted', name='uniq_host0name0deleted'), sa.UniqueConstraint('uuid', name='uniq_host0uuid'), ) op.create_index('hosts_type_idx', 'hosts', ['type'], unique=False) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/db/sqlalchemy/models.py0000664000175100017510000001567415033036143021617 0ustar00mylesmyles# Copyright 2016 NTT Data. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_db.sqlalchemy import models from oslo_utils import timeutils from sqlalchemy import (Column, DateTime, Index, Integer, Enum, String, schema) from sqlalchemy.ext.declarative import declarative_base from sqlalchemy import orm from sqlalchemy import ForeignKey, Boolean, Text BASE = declarative_base() class MasakariTimestampMixin(object): # Note(tpatil): timeutils.utcnow() method return microseconds part but db # doesn't store it because of which subsequent calls to get resources # from the same db session object instance doesn't return microsecond for # datetime fields. To avoid this discrepancy, removed microseconds from # datetime fields so that there is no need to remove it for create/update # cases in the respective versioned objects. created_at = Column(DateTime, default=lambda: timeutils.utcnow().replace( microsecond=0)) updated_at = Column(DateTime, onupdate=lambda: timeutils.utcnow().replace( microsecond=0)) class MasakariAPIBase(MasakariTimestampMixin, models.ModelBase): """Base class for MasakariAPIBase Models.""" metadata = None def __copy__(self): """Implement a safe copy.copy(). SQLAlchemy-mapped objects travel with an object called an InstanceState, which is pegged to that object specifically and tracks everything about that object. It's critical within all attribute operations, including gets and deferred loading. This object definitely cannot be shared among two instances, and must be handled. 
The copy routine here makes use of session.merge() which already essentially implements a "copy" style of operation, which produces a new instance with a new InstanceState and copies all the data along mapped attributes without using any SQL. The mode we are using here has the caveat that the given object must be "clean", e.g. that it has no database-loaded state that has been updated and not flushed. This is a good thing, as creating a copy of an object including non-flushed, pending database state is probably not a good idea; neither represents what the actual row looks like, and only one should be flushed. """ session = orm.Session() copy = session.merge(self, load=False) session.expunge(copy) return copy class FailoverSegment(BASE, MasakariAPIBase, models.SoftDeleteMixin): """Represents a failover segment.""" __tablename__ = 'failover_segments' __table_args__ = ( schema.UniqueConstraint("name", "deleted", name="uniq_segment0name0deleted"), schema.UniqueConstraint('uuid', name='uniq_segments0uuid'), Index('segments_service_type_idx', 'service_type'), ) id = Column(Integer, primary_key=True, autoincrement=True) uuid = Column(String(36), nullable=False) name = Column(String(255), nullable=False) service_type = Column(String(255), nullable=False) enabled = Column(Boolean, default=True) description = Column(Text) recovery_method = Column(Enum('auto', 'reserved_host', 'auto_priority', 'rh_priority', name='recovery_methods'), nullable=False) class Host(BASE, MasakariAPIBase, models.SoftDeleteMixin): """Represents a host.""" __tablename__ = 'hosts' __table_args__ = ( schema.UniqueConstraint("name", "deleted", name="uniq_host0name0deleted"), schema.UniqueConstraint('uuid', name='uniq_host0uuid'), Index('hosts_type_idx', 'type'), ) id = Column(Integer, primary_key=True, autoincrement=True) uuid = Column(String(36), nullable=False) name = Column(String(255), nullable=False) reserved = Column(Boolean, default=False) type = Column(String(255), nullable=False) 
control_attributes = Column(Text, nullable=False) on_maintenance = Column(Boolean, default=False) failover_segment_id = Column(String(36), ForeignKey('failover_segments.uuid'), nullable=False) failover_segment = orm.relationship(FailoverSegment, backref=orm.backref('hosts'), foreign_keys=failover_segment_id, primaryjoin='and_(Host.' 'failover_segment_id==' 'FailoverSegment.uuid,' 'Host.deleted==0)') class Notification(BASE, MasakariAPIBase, models.SoftDeleteMixin): """Represents a notification.""" __tablename__ = 'notifications' __table_args__ = ( schema.UniqueConstraint('notification_uuid', name='uniq_notification0uuid'), ) id = Column(Integer, primary_key=True, autoincrement=True) notification_uuid = Column(String(36), nullable=False) generated_time = Column(DateTime, nullable=False) type = Column(String(36), nullable=False) payload = Column(Text) status = Column(Enum('new', 'running', 'error', 'failed', 'ignored', 'finished', name='notification_status'), nullable=False) source_host_uuid = Column(String(36), nullable=False) failover_segment_uuid = Column(String(36), nullable=False) message = Column(Text) class VMove(BASE, MasakariAPIBase, models.SoftDeleteMixin): """Represents one vm move.""" __tablename__ = 'vmoves' __table_args__ = ( schema.UniqueConstraint('uuid', name='uniq_vmove0uuid'), ) id = Column(Integer, primary_key=True, autoincrement=True) uuid = Column(String(36), nullable=False) notification_uuid = Column(String(36), nullable=False) instance_uuid = Column(String(36), nullable=False) instance_name = Column(String(255), nullable=False) source_host = Column(String(255), nullable=True) dest_host = Column(String(255), nullable=True) start_time = Column(DateTime, nullable=True) end_time = Column(DateTime, nullable=True) type = Column(String(36), nullable=True) status = Column(String(255), nullable=True) message = Column(Text) ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1751923813.5287101 
masakari-19.1.0.dev18/masakari/engine/0000775000175100017510000000000015033036146016466 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/engine/__init__.py0000664000175100017510000000000015033036143020562 0ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/engine/driver.py0000664000175100017510000000560115033036143020332 0ustar00mylesmyles# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
""" Driver base-class: (Beginning of) the contract that masakari drivers must follow, and shared types that support that contract """ import abc import sys from oslo_log import log as logging from stevedore import driver import masakari.conf from masakari import utils CONF = masakari.conf.CONF LOG = logging.getLogger(__name__) class NotificationDriver(object, metaclass=abc.ABCMeta): @abc.abstractmethod def execute_host_failure(self, context, host_name, recovery_method, notification_uuid, **kwargs): pass @abc.abstractmethod def execute_instance_failure(self, context, instance_uuid, notification_uuid): pass @abc.abstractmethod def execute_process_failure(self, context, process_name, host_name, notification_uuid): pass @abc.abstractmethod def get_notification_recovery_workflow_details(self, context, recovery_method, notification_uuid): pass @abc.abstractmethod def upgrade_backend(self, backend): pass def load_masakari_driver(masakari_driver=None): """Load a masakari driver module. Load the masakari driver module specified by the notification_driver configuration option or, if supplied, the driver name supplied as an argument. 
:param masakari_driver: a masakari driver name to override the config opt :returns: a NotificationDriver instance """ if not masakari_driver: masakari_driver = CONF.notification_driver if not masakari_driver: LOG.error("Notification driver option required, but not specified") sys.exit(1) LOG.info("Loading masakari notification driver '%s'", masakari_driver) try: notification_driver = driver.DriverManager('masakari.driver', masakari_driver, invoke_on_load=True).driver return utils.check_isinstance(notification_driver, NotificationDriver) except ImportError: LOG.exception("Failed to load notification driver '%s'.", masakari_driver) sys.exit(1) ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1751923813.5287101 masakari-19.1.0.dev18/masakari/engine/drivers/0000775000175100017510000000000015033036146020144 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/engine/drivers/__init__.py0000664000175100017510000000125015033036143022250 0ustar00mylesmyles# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
__import__('pkg_resources').declare_namespace(__name__) ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1751923813.5287101 masakari-19.1.0.dev18/masakari/engine/drivers/taskflow/0000775000175100017510000000000015033036146021776 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/engine/drivers/taskflow/__init__.py0000664000175100017510000000131415033036143024103 0ustar00mylesmyles# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from masakari.engine.drivers.taskflow import driver TaskFlowDriver = driver.TaskFlowDriver ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/engine/drivers/taskflow/base.py0000664000175100017510000001223715033036143023264 0ustar00mylesmyles# Copyright 2016 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import contextlib import os from oslo_log import log as logging from oslo_utils import timeutils from stevedore import named # For more information please visit: https://wiki.openstack.org/wiki/TaskFlow import taskflow.engines from taskflow import exceptions from taskflow import formatters from taskflow.listeners import base from taskflow.listeners import logging as logging_listener from taskflow.persistence import backends from taskflow.persistence import models from taskflow import task import masakari.conf from masakari import exception CONF = masakari.conf.CONF PERSISTENCE_BACKEND = CONF.taskflow.connection LOG = logging.getLogger(__name__) class MasakariTask(task.Task): """The root task class for all masakari tasks. It automatically names the given task using the module and class that implement the given task as the task name. """ def __init__(self, context, novaclient, **kwargs): requires = kwargs.get('requires') rebind = kwargs.get('rebind') provides = kwargs.get('provides') super(MasakariTask, self).__init__(self.__class__.__name__, requires=requires, rebind=rebind, provides=provides) self.context = context self.novaclient = novaclient self.progress = [] def update_details(self, progress_data, progress=0.0): progress_details = { 'timestamp': str(timeutils.utcnow()), 'progress': progress, 'message': progress_data } self.progress.append(progress_details) self._notifier.notify('update_progress', {'progress': progress, "progress_details": self.progress}) class SpecialFormatter(formatters.FailureFormatter): # Exception is an excepted case, don't include traceback in log if fails. 
_NO_TRACE_EXCEPTIONS = (exception.SkipInstanceRecoveryException, exception.SkipHostRecoveryException) def __init__(self, engine): super(SpecialFormatter, self).__init__(engine) def format(self, fail, atom_matcher): if fail.check(*self._NO_TRACE_EXCEPTIONS) is not None: exc_info = None exc_details = '%s%s' % (os.linesep, fail.pformat(traceback=False)) return (exc_info, exc_details) else: return super(SpecialFormatter, self).format(fail, atom_matcher) class DynamicLogListener(logging_listener.DynamicLoggingListener): """This is used to attach to taskflow engines while they are running. It provides a bunch of useful features that expose the actions happening inside a taskflow engine, which can be useful for developers for debugging, for operations folks for monitoring and tracking of the resource actions and more... """ def __init__(self, engine, task_listen_for=base.DEFAULT_LISTEN_FOR, flow_listen_for=base.DEFAULT_LISTEN_FOR, retry_listen_for=base.DEFAULT_LISTEN_FOR, logger=LOG): super(DynamicLogListener, self).__init__( engine, task_listen_for=task_listen_for, flow_listen_for=flow_listen_for, retry_listen_for=retry_listen_for, log=logger, fail_formatter=SpecialFormatter(engine)) def get_recovery_flow(task_list, **kwargs): """This is used create extension object from provided task_list. This method returns the extension object of the each task provided in a list using stevedore extension manager. 
""" extensions = named.NamedExtensionManager( 'masakari.task_flow.tasks', names=task_list, name_order=True, invoke_on_load=True, invoke_kwds=kwargs) for extension in extensions.extensions: yield extension.obj def load_taskflow_into_engine(action, nested_flow, process_what): book = None backend = None if PERSISTENCE_BACKEND: backend = backends.fetch(PERSISTENCE_BACKEND) with contextlib.closing(backend.get_connection()) as conn: try: book = conn.get_logbook(process_what['notification_uuid']) except exceptions.NotFound: pass if book is None: book = models.LogBook(action, process_what['notification_uuid']) return taskflow.engines.load(nested_flow, store=process_what, backend=backend, book=book) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/engine/drivers/taskflow/driver.py0000664000175100017510000004007215033036143023643 0ustar00mylesmyles# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Driver TaskFlowDriver: Execute notification workflows using taskflow. 
""" from collections import OrderedDict import contextlib from oslo_log import log as logging from oslo_utils import excutils from taskflow import exceptions from taskflow.persistence import backends from masakari.compute import nova import masakari.conf from masakari.engine import driver from masakari.engine.drivers.taskflow import base from masakari.engine.drivers.taskflow import host_failure from masakari.engine.drivers.taskflow import instance_failure from masakari.engine.drivers.taskflow import process_failure from masakari import exception from masakari.i18n import _ from masakari import objects from masakari.objects import fields CONF = masakari.conf.CONF TASKFLOW_CONF = CONF.taskflow_driver_recovery_flows PERSISTENCE_BACKEND = CONF.taskflow.connection LOG = logging.getLogger(__name__) class TaskFlowDriver(driver.NotificationDriver): def __init__(self): super(TaskFlowDriver, self).__init__() def _execute_auto_workflow(self, context, novaclient, process_what): flow_engine = host_failure.get_auto_flow(context, novaclient, process_what) # Attaching this listener will capture all of the notifications # that taskflow sends out and redirect them to a more useful # log for masakari's debugging (or error reporting) usage. 
with base.DynamicLogListener(flow_engine, logger=LOG): flow_engine.run() def _execute_rh_workflow(self, context, novaclient, process_what, **kwargs): if not kwargs['reserved_host_list']: msg = _('No reserved_hosts available for evacuation.') raise exception.ReservedHostsUnavailable(message=msg) process_what['reserved_host_list'] = kwargs.pop('reserved_host_list') flow_engine = host_failure.get_rh_flow(context, novaclient, process_what, **kwargs) with base.DynamicLogListener(flow_engine, logger=LOG): try: flow_engine.run() except exception.LockAlreadyAcquired as ex: raise exception.HostRecoveryFailureException(ex.message) def _execute_auto_priority_workflow(self, context, novaclient, process_what, **kwargs): try: self._execute_auto_workflow(context, novaclient, process_what) except Exception as ex: with excutils.save_and_reraise_exception(reraise=False) as ctxt: if isinstance(ex, exception.SkipHostRecoveryException): ctxt.reraise = True return # Caught generic Exception to make sure that any failure # should lead to execute 'reserved_host' recovery workflow. msg = ("Failed to evacuate all instances from " "failed_host: '%(failed_host)s' using " "'%(auto)s' workflow, retrying using " "'%(reserved_host)s' workflow.") LOG.warning(msg, { 'failed_host': process_what['host_name'], 'auto': fields.FailoverSegmentRecoveryMethod.AUTO, 'reserved_host': fields.FailoverSegmentRecoveryMethod.RESERVED_HOST }) self._execute_rh_workflow(context, novaclient, process_what, **kwargs) def _execute_rh_priority_workflow(self, context, novaclient, process_what, **kwargs): try: self._execute_rh_workflow(context, novaclient, process_what, **kwargs) except Exception as ex: with excutils.save_and_reraise_exception(reraise=False) as ctxt: if isinstance(ex, exception.SkipHostRecoveryException): ctxt.reraise = True return # Caught generic Exception to make sure that any failure # should lead to execute 'auto' recovery workflow. 
msg = ("Failed to evacuate all instances from " "failed_host '%(failed_host)s' using " "'%(reserved_host)s' workflow, retrying using " "'%(auto)s' workflow") LOG.warning(msg, { 'failed_host': process_what['host_name'], 'reserved_host': fields.FailoverSegmentRecoveryMethod.RESERVED_HOST, 'auto': fields.FailoverSegmentRecoveryMethod.AUTO }) self._execute_auto_workflow(context, novaclient, process_what) def execute_host_failure(self, context, host_name, recovery_method, notification_uuid, **kwargs): novaclient = nova.API() # get flow for host failure process_what = { 'host_name': host_name, 'notification_uuid': notification_uuid } try: if recovery_method == fields.FailoverSegmentRecoveryMethod.AUTO: self._execute_auto_workflow(context, novaclient, process_what) elif recovery_method == ( fields.FailoverSegmentRecoveryMethod.RESERVED_HOST): self._execute_rh_workflow(context, novaclient, process_what, **kwargs) elif recovery_method == ( fields.FailoverSegmentRecoveryMethod.AUTO_PRIORITY): self._execute_auto_priority_workflow( context, novaclient, process_what, **kwargs) else: self._execute_rh_priority_workflow(context, novaclient, process_what, **kwargs) except Exception as exc: with excutils.save_and_reraise_exception(reraise=False) as ctxt: if isinstance(exc, (exception.SkipHostRecoveryException, exception.HostRecoveryFailureException, exception.ReservedHostsUnavailable)): ctxt.reraise = True return msg = _("Failed to execute host failure flow for " "notification '%s'.") % notification_uuid raise exception.MasakariException(msg) def execute_instance_failure(self, context, instance_uuid, notification_uuid): novaclient = nova.API() # get flow for instance failure process_what = { 'instance_uuid': instance_uuid, 'notification_uuid': notification_uuid } try: flow_engine = instance_failure.get_instance_recovery_flow( context, novaclient, process_what) except Exception: msg = _("Failed to create instance failure flow for " "notification '%s'.") % notification_uuid 
LOG.exception(msg) raise exception.MasakariException(msg) # Attaching this listener will capture all of the notifications that # taskflow sends out and redirect them to a more useful log for # masakari's debugging (or error reporting) usage. with base.DynamicLogListener(flow_engine, logger=LOG): try: flow_engine.run() except Exception as exc: with excutils.save_and_reraise_exception(reraise=False) as e: if isinstance( exc, (exception.SkipInstanceRecoveryException, exception.IgnoreInstanceRecoveryException, exception.InstanceRecoveryFailureException)): e.reraise = True return msg = _("Failed to execute instance failure flow for " "notification '%s'.") % notification_uuid raise exception.MasakariException(msg) def execute_process_failure(self, context, process_name, host_name, notification_uuid): novaclient = nova.API() # get flow for process failure process_what = { 'process_name': process_name, 'host_name': host_name, 'notification_uuid': notification_uuid } # TODO(abhishekk) We need to create a map for process_name and # respective python-client so that we can pass appropriate client # as a input to the process. if process_name == "nova-compute": recovery_flow = process_failure.get_compute_process_recovery_flow else: LOG.warning("Skipping recovery for process: %s.", process_name) raise exception.SkipProcessRecoveryException() try: flow_engine = recovery_flow(context, novaclient, process_what) except Exception: msg = _("Failed to create process failure flow for " "notification '%s'.") % notification_uuid LOG.exception(msg) raise exception.MasakariException(msg) # Attaching this listener will capture all of the notifications that # taskflow sends out and redirect them to a more useful log for # masakari's debugging (or error reporting) usage. 
with base.DynamicLogListener(flow_engine, logger=LOG): try: flow_engine.run() except Exception as exc: with excutils.save_and_reraise_exception(reraise=False) as e: if isinstance( exc, exception.ProcessRecoveryFailureException): e.reraise = True return msg = _("Failed to execute instance failure flow for " "notification '%s'.") % notification_uuid raise exception.MasakariException(msg) @contextlib.contextmanager def upgrade_backend(self, persistence_backend): try: backend = backends.fetch(persistence_backend) with contextlib.closing(backend.get_connection()) as conn: conn.upgrade() except exceptions.NotFound as e: raise e def _get_taskflow_sequence(self, context, recovery_method, notification): # Get the taskflow sequence based on the recovery method. novaclient = nova.API() task_list = [] # Get linear task flow based on notification type if notification.type == fields.NotificationType.VM: tasks = TASKFLOW_CONF.instance_failure_recovery_tasks elif notification.type == fields.NotificationType.PROCESS: tasks = TASKFLOW_CONF.process_failure_recovery_tasks elif notification.type == fields.NotificationType.COMPUTE_HOST: if recovery_method in [ fields.FailoverSegmentRecoveryMethod.AUTO, fields.FailoverSegmentRecoveryMethod.AUTO_PRIORITY]: tasks = TASKFLOW_CONF.host_auto_failure_recovery_tasks elif recovery_method in [ fields.FailoverSegmentRecoveryMethod.RESERVED_HOST, fields.FailoverSegmentRecoveryMethod.RH_PRIORITY]: tasks = TASKFLOW_CONF.host_rh_failure_recovery_tasks for plugin in base.get_recovery_flow( tasks['pre'], context=context, novaclient=novaclient, update_host_method=None): task_list.append(plugin.name) for plugin in base.get_recovery_flow( tasks['main'], context=context, novaclient=novaclient, update_host_method=None): task_list.append(plugin.name) for plugin in base.get_recovery_flow( tasks['post'], context=context, novaclient=novaclient, update_host_method=None): task_list.append(plugin.name) return task_list def 
get_notification_recovery_workflow_details(self, context, recovery_method, notification): """Retrieve progress details in notification""" backend = backends.fetch(PERSISTENCE_BACKEND) with contextlib.closing(backend.get_connection()) as conn: progress_details = [] flow_details = conn.get_flows_for_book( notification.notification_uuid) for flow in flow_details: od = OrderedDict() atom_details = list(conn.get_atoms_for_flow(flow.uuid)) # TODO(ShilpaSD): In case recovery_method is auto_priority/ # rh_priority, there is no way to figure out whether the # recovery was done successfully using AUTO or RH flow. # Taskflow stores 'retry_instance_evacuate_engine_retry' task # in case of RH flow so if # 'retry_instance_evacuate_engine_retry' is stored in the # given flow details then the sorting of task details should # happen based on the RH flow. # This logic won't be required after LP #1815738 is fixed. if recovery_method in ['AUTO_PRIORITY', 'RH_PRIORITY']: persisted_task_list = [atom.name for atom in atom_details] if ('retry_instance_evacuate_engine_retry' in persisted_task_list): recovery_method = ( fields.FailoverSegmentRecoveryMethod. RESERVED_HOST) else: recovery_method = ( fields.FailoverSegmentRecoveryMethod.AUTO) # TODO(ShilpaSD): Taskflow doesn't support to return task # details in the same sequence in which all tasks are # executed. Reported this issue in LP #1815738. To resolve # this issue load the tasks based on the recovery method and # later sort it based on this task list so progress_details # can be returned in the expected order. task_list = self._get_taskflow_sequence(context, recovery_method, notification) for task in task_list: for atom in atom_details: if task == atom.name: od[atom.name] = atom for key, value in od.items(): # Add progress_details only if tasks are executed and meta # is available in which progress_details are stored. 
if value.meta and value.meta.get("progress_details"): progress_details_obj = ( objects.NotificationProgressDetails.create( value.name, value.meta['progress'], value.meta['progress_details']['details'] ['progress_details'], value.state)) progress_details.append(progress_details_obj) return progress_details ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/engine/drivers/taskflow/host_failure.py0000664000175100017510000005437015033036143025042 0ustar00mylesmyles# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import eventlet from eventlet import greenpool from oslo_config import cfg from oslo_log import log as logging from oslo_service import loopingcall from oslo_utils import excutils from oslo_utils import strutils from oslo_utils import timeutils from taskflow.patterns import linear_flow from taskflow import retry import masakari.conf from masakari.engine.drivers.taskflow import base from masakari import exception from masakari import objects from masakari.objects import fields from masakari import utils CONF = masakari.conf.CONF LOG = logging.getLogger(__name__) ACTION = 'instance:evacuate' # Instance power_state SHUTDOWN = 4 TASKFLOW_CONF = cfg.CONF.taskflow_driver_recovery_flows class DisableComputeServiceTask(base.MasakariTask): def __init__(self, context, novaclient, **kwargs): kwargs['requires'] = ["host_name"] super(DisableComputeServiceTask, self).__init__(context, novaclient, **kwargs) def execute(self, host_name): msg = "Disabling compute service on host: '%s'" % host_name self.update_details(msg) self.novaclient.enable_disable_service(self.context, host_name, reason=CONF.host_failure.service_disable_reason) # Sleep until nova-compute service is marked as disabled. 
log_msg = ("Sleeping %(wait)s sec before starting recovery " "thread until nova recognizes the node down.") LOG.info(log_msg, {'wait': CONF.wait_period_after_service_update}) eventlet.sleep(CONF.wait_period_after_service_update) msg = "Disabled compute service on host: '%s'" % host_name self.update_details(msg, 1.0) class PrepareHAEnabledInstancesTask(base.MasakariTask): """Get all HA_Enabled instances.""" def __init__(self, context, novaclient, **kwargs): kwargs['requires'] = ["host_name", "notification_uuid"] super(PrepareHAEnabledInstancesTask, self).__init__(context, novaclient, **kwargs) def execute(self, host_name, notification_uuid): def _filter_instances(instance_list): ha_enabled_instances = [] non_ha_enabled_instances = [] ha_enabled_key = CONF.host_failure.ha_enabled_instance_metadata_key for instance in instance_list: is_instance_ha_enabled = strutils.bool_from_string( instance.metadata.get(ha_enabled_key, False)) if CONF.host_failure.ignore_instances_in_error_state and ( getattr(instance, "OS-EXT-STS:vm_state") == "error"): if is_instance_ha_enabled: msg = ("Ignoring recovery of HA_Enabled instance " "'%(instance_id)s' as it is in 'error' state." ) % {'instance_id': instance.id} LOG.info(msg) self.update_details(msg, 0.4) continue if is_instance_ha_enabled: ha_enabled_instances.append(instance) else: non_ha_enabled_instances.append(instance) msg = "Total HA Enabled instances count: '%d'" % len( ha_enabled_instances) self.update_details(msg, 0.6) if CONF.host_failure.evacuate_all_instances: msg = ("Total Non-HA Enabled instances count: '%d'" % len( non_ha_enabled_instances)) self.update_details(msg, 0.7) ha_enabled_instances.extend(non_ha_enabled_instances) msg = ("All instances (HA Enabled/Non-HA Enabled) should be " "considered for evacuation. 
Total count is: '%d'") % ( len(ha_enabled_instances)) self.update_details(msg, 0.8) return ha_enabled_instances msg = "Preparing instances for evacuation" self.update_details(msg) instance_list = self.novaclient.get_servers(self.context, host_name) msg = ("Total instances running on failed host '%(host_name)s' is " "%(instance_list)d") % {'host_name': host_name, 'instance_list': len(instance_list)} self.update_details(msg, 0.3) instance_list = _filter_instances(instance_list) if not instance_list: msg = ("Skipped host '%s' recovery as no instances needs to be " "evacuated" % host_name) self.update_details(msg, 1.0) LOG.info(msg) raise exception.SkipHostRecoveryException(message=msg) # persist vm moves for instance in instance_list: vmove = objects.VMove(context=self.context) vmove.instance_uuid = instance.id vmove.instance_name = instance.name vmove.notification_uuid = notification_uuid vmove.source_host = host_name vmove.status = fields.VMoveStatus.PENDING vmove.type = fields.VMoveType.EVACUATION vmove.create() # List of instance UUID instance_list = [instance.id for instance in instance_list] msg = "Instances to be evacuated are: '%s'" % ','.join(instance_list) self.update_details(msg, 1.0) class EvacuateInstancesTask(base.MasakariTask): def __init__(self, context, novaclient, **kwargs): kwargs['requires'] = ["host_name", "notification_uuid"] self.update_host_method = kwargs['update_host_method'] super(EvacuateInstancesTask, self).__init__(context, novaclient, **kwargs) def _get_state_and_host_of_instance(self, context, instance): new_instance = self.novaclient.get_server(context, instance.id) instance_host = getattr(new_instance, "OS-EXT-SRV-ATTR:hypervisor_hostname") old_vm_state = getattr(instance, "OS-EXT-STS:vm_state") new_vm_state = getattr(new_instance, "OS-EXT-STS:vm_state") return (old_vm_state, new_vm_state, instance_host) def _stop_after_evacuation(self, context, instance): def _wait_for_stop_confirmation(): old_vm_state, new_vm_state, instance_host = 
( self._get_state_and_host_of_instance(context, instance)) if new_vm_state == 'stopped': raise loopingcall.LoopingCallDone() try: # confirm instance is stopped after recovery self.novaclient.stop_server(context, instance.id) timer = loopingcall.FixedIntervalWithTimeoutLoopingCall( _wait_for_stop_confirmation) timer.start(interval=CONF.verify_interval, timeout=CONF.wait_period_after_power_off).wait() except loopingcall.LoopingCallTimeOut: with excutils.save_and_reraise_exception(): msg = ("Instance '%(uuid)s' is successfully evacuated but " "timeout to stop.") % {'uuid': instance.id} LOG.warning(msg) finally: timer.stop() def _evacuate_and_confirm(self, context, vmove, reserved_host=None): def _update_vmove(vmove, status=None, start_time=None, end_time=None, dest_host=None, message=None): if status: vmove.status = status if start_time: vmove.start_time = start_time if end_time: vmove.end_time = end_time if dest_host: vmove.dest_host = dest_host if message: vmove.message = message vmove.save() instance_uuid = vmove.instance_uuid instance = self.novaclient.get_server(context, instance_uuid) # Before locking the instance check whether it is already locked # by user, if yes don't lock the instance instance_already_locked = self.novaclient.get_server( context, instance.id).locked if not instance_already_locked: # lock the instance so that until evacuation and confirmation # is not complete, user won't be able to perform any actions # on the instance. 
self.novaclient.lock_server(context, instance.id) def _wait_for_evacuation_confirmation(): old_vm_state, new_vm_state, instance_host = ( self._get_state_and_host_of_instance(context, instance)) if (new_vm_state == 'error' and new_vm_state != old_vm_state): raise exception.InstanceEvacuateFailed( instance_uuid=instance.id) if instance_host != vmove.source_host: if ((old_vm_state == 'error' and new_vm_state == 'active') or old_vm_state == new_vm_state): raise loopingcall.LoopingCallDone() def _wait_for_evacuation(): try: # add a timeout to the periodic call. timer = loopingcall.FixedIntervalWithTimeoutLoopingCall( _wait_for_evacuation_confirmation) timer.start(interval=CONF.verify_interval, timeout=CONF.wait_period_after_evacuation).wait() except loopingcall.LoopingCallTimeOut: with excutils.save_and_reraise_exception(): msg = ("Timeout for instance '%(uuid)s' evacuation." % {'uuid': instance.id}) LOG.warning(msg) finally: # stop the periodic call, in case of exceptions or # Timeout. timer.stop() try: vm_state = getattr(instance, "OS-EXT-STS:vm_state") task_state = getattr(instance, "OS-EXT-STS:task_state") # Nova evacuates an instance only when vm_state is in active, # stopped or error state. If an instance is in other than active, # error and stopped vm_state, masakari resets the instance state # to *error* so that the instance can be evacuated. stop_instance = True if vm_state not in ['active', 'error', 'stopped']: self.novaclient.reset_instance_state(context, instance.id) instance = self.novaclient.get_server(context, instance.id) power_state = getattr(instance, "OS-EXT-STS:power_state") if vm_state == 'resized' and power_state != SHUTDOWN: stop_instance = False elif vm_state == 'stopped' and task_state is None: # If vm_state is stopped and task_state is none, the instance # will be recovered with vm_state 'stopped'. # So it doesn't need to stop the instance after evacuation. 
stop_instance = False elif task_state is not None: # Nova fails evacuation when the instance's task_state is not # none. In this case, masakari resets the instance's vm_state # to 'error' and task_state to none. self.novaclient.reset_instance_state(context, instance.id) instance = self.novaclient.get_server(context, instance.id) if vm_state == 'active': stop_instance = False # start to evacuate the instance _update_vmove( vmove, status=fields.VMoveStatus.ONGOING, start_time=timeutils.utcnow()) self.novaclient.evacuate_instance(context, instance.id, target=reserved_host) _wait_for_evacuation() if vm_state != 'active': if stop_instance: self._stop_after_evacuation(self.context, instance) # If the instance was in 'error' state before failure # it should be set to 'error' after recovery. if vm_state == 'error': self.novaclient.reset_instance_state( context, instance.id) instance = self.novaclient.get_server(context, instance_uuid) dest_host = getattr( instance, "OS-EXT-SRV-ATTR:hypervisor_hostname") _update_vmove( vmove, status=fields.VMoveStatus.SUCCEEDED, dest_host=dest_host) except loopingcall.LoopingCallTimeOut: # Instance is not stop in the expected time_limit. msg = "Failed reason: timeout." _update_vmove( vmove, status=fields.VMoveStatus.FAILED, message=msg) except Exception as e: # Exception is raised while resetting instance state or # evacuating the instance itself. 
LOG.warning(str(e)) _update_vmove( vmove, status=fields.VMoveStatus.FAILED, message=str(e)) finally: _update_vmove(vmove, end_time=timeutils.utcnow()) if not instance_already_locked: # Unlock the server after evacuation and confirmation self.novaclient.unlock_server(context, instance.id) def execute(self, host_name, notification_uuid, reserved_host=None): all_vmoves = objects.VMoveList.get_all_vmoves( self.context, notification_uuid, status=fields.VMoveStatus.PENDING) instance_list = [i.instance_uuid for i in all_vmoves] msg = ("Start evacuation of instances from failed host '%(host_name)s'" ", instance uuids are: '%(instance_list)s'") % { 'host_name': host_name, 'instance_list': ','.join(instance_list)} self.update_details(msg) def _do_evacuate(context, host_name, reserved_host=None): if reserved_host: msg = "Enabling reserved host: '%s'" % reserved_host self.update_details(msg, 0.1) if CONF.host_failure.add_reserved_host_to_aggregate: # Assign reserved_host to an aggregate to which the failed # compute host belongs to. 
aggregates = self.novaclient.get_aggregate_list(context) for aggregate in aggregates: if host_name in aggregate.hosts: try: msg = ("Add host %(reserved_host)s to " "aggregate %(aggregate)s") % { 'reserved_host': reserved_host, 'aggregate': aggregate.name} self.update_details(msg, 0.2) self.novaclient.add_host_to_aggregate( context, reserved_host, aggregate) msg = ("Added host %(reserved_host)s to " "aggregate %(aggregate)s") % { 'reserved_host': reserved_host, 'aggregate': aggregate.name} self.update_details(msg, 0.3) except exception.Conflict: msg = ("Host '%(reserved_host)s' already has " "been added to aggregate " "'%(aggregate)s'.") % { 'reserved_host': reserved_host, 'aggregate': aggregate.name} self.update_details(msg, 1.0) LOG.info(msg) self.novaclient.enable_disable_service( context, reserved_host, enable=True) # Set reserved property of reserved_host to False self.update_host_method(context, reserved_host) thread_pool = greenpool.GreenPool( CONF.host_failure_recovery_threads) nonlocal all_vmoves for vmove in all_vmoves: msg = ("Evacuation of instance started: '%s'" % vmove.instance_uuid) self.update_details(msg, 0.5) thread_pool.spawn_n(self._evacuate_and_confirm, self.context, vmove, reserved_host) thread_pool.waitall() all_vmoves = objects.VMoveList.get_all_vmoves( self.context, notification_uuid) succeeded_vmoves = [i.instance_uuid for i in all_vmoves if i.status == fields.VMoveStatus.SUCCEEDED] if succeeded_vmoves: succeeded_vmoves.sort() msg = ("Successfully evacuate instances '%(instance_list)s' " "from host '%(host_name)s'") % { 'instance_list': ','.join(succeeded_vmoves), 'host_name': host_name} self.update_details(msg, 0.7) failed_vmoves = [i.instance_uuid for i in all_vmoves if i.status == fields.VMoveStatus.FAILED] if failed_vmoves: msg = ("Failed to evacuate instances " "'%(instance_list)s' from host " "'%(host_name)s'") % { 'instance_list': ','.join(failed_vmoves), 'host_name': host_name} self.update_details(msg, 0.7) raise 
exception.HostRecoveryFailureException( message=msg) msg = "Evacuation process completed!" self.update_details(msg, 1.0) lock_name = reserved_host if reserved_host else None @utils.synchronized(lock_name) def do_evacuate_with_reserved_host(context, host_name, notification_uuid, reserved_host): _do_evacuate(context, host_name, reserved_host=reserved_host) if lock_name: do_evacuate_with_reserved_host(self.context, host_name, notification_uuid, reserved_host) else: # No need to acquire lock on reserved_host when recovery_method is # 'auto' as the selection of compute host will be decided by nova. _do_evacuate(self.context, host_name) def get_auto_flow(context, novaclient, process_what): """Constructs and returns the engine entrypoint flow. This flow will do the following: 1. Disable compute service on source host 2. Get all HA_Enabled instances. 3. Evacuate all the HA_Enabled instances. 4. Confirm evacuation of instances. """ flow_name = ACTION.replace(":", "_") + "_engine" nested_flow = linear_flow.Flow(flow_name) task_dict = TASKFLOW_CONF.host_auto_failure_recovery_tasks auto_evacuate_flow_pre = linear_flow.Flow('pre_tasks') for plugin in base.get_recovery_flow(task_dict['pre'], context=context, novaclient=novaclient, update_host_method=None): auto_evacuate_flow_pre.add(plugin) auto_evacuate_flow_main = linear_flow.Flow('main_tasks') for plugin in base.get_recovery_flow(task_dict['main'], context=context, novaclient=novaclient, update_host_method=None): auto_evacuate_flow_main.add(plugin) auto_evacuate_flow_post = linear_flow.Flow('post_tasks') for plugin in base.get_recovery_flow(task_dict['post'], context=context, novaclient=novaclient, update_host_method=None): auto_evacuate_flow_post.add(plugin) nested_flow.add(auto_evacuate_flow_pre) nested_flow.add(auto_evacuate_flow_main) nested_flow.add(auto_evacuate_flow_post) return base.load_taskflow_into_engine(ACTION, nested_flow, process_what) def get_rh_flow(context, novaclient, process_what, **kwargs): """Constructs 
and returns the engine entrypoint flow. This flow will do the following: 1. Disable compute service on source host 2. Get all HA_Enabled instances. 3. Evacuate all the HA_Enabled instances using reserved_host. 4. Confirm evacuation of instances. """ flow_name = ACTION.replace(":", "_") + "_engine" nested_flow = linear_flow.Flow(flow_name) task_dict = TASKFLOW_CONF.host_rh_failure_recovery_tasks rh_evacuate_flow_pre = linear_flow.Flow('pre_tasks') for plugin in base.get_recovery_flow( task_dict['pre'], context=context, novaclient=novaclient, **kwargs): rh_evacuate_flow_pre.add(plugin) rh_evacuate_flow_main = linear_flow.Flow( "retry_%s" % flow_name, retry=retry.ParameterizedForEach( rebind=['reserved_host_list'], provides='reserved_host')) for plugin in base.get_recovery_flow( task_dict['main'], context=context, novaclient=novaclient, **kwargs): rh_evacuate_flow_main.add(plugin) rh_evacuate_flow_post = linear_flow.Flow('post_tasks') for plugin in base.get_recovery_flow( task_dict['post'], context=context, novaclient=novaclient, **kwargs): rh_evacuate_flow_post.add(plugin) nested_flow.add(rh_evacuate_flow_pre) nested_flow.add(rh_evacuate_flow_main) nested_flow.add(rh_evacuate_flow_post) return base.load_taskflow_into_engine(ACTION, nested_flow, process_what) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/engine/drivers/taskflow/instance_failure.py0000664000175100017510000002150115033036143025657 0ustar00mylesmyles# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_config import cfg from oslo_log import log as logging from oslo_service import loopingcall from oslo_utils import strutils from taskflow.patterns import linear_flow import masakari.conf from masakari.engine.drivers.taskflow import base from masakari import exception CONF = masakari.conf.CONF LOG = logging.getLogger(__name__) ACTION = "instance:recovery" TASKFLOW_CONF = cfg.CONF.taskflow_driver_recovery_flows class StopInstanceTask(base.MasakariTask): def __init__(self, context, novaclient, **kwargs): kwargs['requires'] = ["instance_uuid"] super(StopInstanceTask, self).__init__(context, novaclient, **kwargs) def execute(self, instance_uuid): """Stop the instance for recovery.""" instance = self.novaclient.get_server(self.context, instance_uuid) ha_enabled_key = CONF.instance_failure.ha_enabled_instance_metadata_key # If an instance is not HA_Enabled and "process_all_instances" config # option is also disabled, then there is no need to take any recovery # action. 
if not CONF.instance_failure.process_all_instances and not ( strutils.bool_from_string( instance.metadata.get(ha_enabled_key, False))): msg = ("Skipping recovery for instance: %(instance_uuid)s as it is" " not Ha_Enabled") % {'instance_uuid': instance_uuid} LOG.info(msg) self.update_details(msg, 1.0) raise exception.SkipInstanceRecoveryException() vm_state = getattr(instance, 'OS-EXT-STS:vm_state') if vm_state in ['paused', 'rescued']: msg = ("Recovery of instance '%(instance_uuid)s' is ignored as it " "is in '%(vm_state)s' state.") % { 'instance_uuid': instance_uuid, 'vm_state': vm_state } LOG.warning(msg) self.update_details(msg, 1.0) raise exception.IgnoreInstanceRecoveryException(msg) if vm_state != 'stopped': if vm_state == 'resized': self.novaclient.reset_instance_state( self.context, instance.id, 'active') msg = "Stopping instance: %s" % instance_uuid self.update_details(msg) try: self.novaclient.stop_server(self.context, instance.id) except exception.Conflict: msg = "Conflict when stopping instance: %s" % instance_uuid self.update_details(msg) instance = self.novaclient.get_server(self.context, instance_uuid) vm_state = getattr(instance, 'OS-EXT-STS:vm_state') if vm_state != 'stopped': raise def _wait_for_power_off(): new_instance = self.novaclient.get_server(self.context, instance_uuid) vm_state = getattr(new_instance, 'OS-EXT-STS:vm_state') if vm_state == 'stopped': raise loopingcall.LoopingCallDone() try: # add a timeout to the periodic call. 
timer = loopingcall.FixedIntervalWithTimeoutLoopingCall( _wait_for_power_off) timer.start(interval=CONF.verify_interval, timeout=CONF.wait_period_after_power_off).wait() msg = "Stopped instance: '%s'" % instance_uuid self.update_details(msg, 1.0) except loopingcall.LoopingCallTimeOut: msg = "Failed to stop instance %(instance)s" % { 'instance': instance.id } self.update_details(msg, 1.0) raise exception.InstanceRecoveryFailureException( message=msg) finally: # stop the periodic call, in case of exceptions or Timeout. timer.stop() class StartInstanceTask(base.MasakariTask): def __init__(self, context, novaclient, **kwargs): kwargs['requires'] = ["instance_uuid"] super(StartInstanceTask, self).__init__(context, novaclient, **kwargs) def execute(self, instance_uuid): """Start the instance.""" msg = "Starting instance: '%s'" % instance_uuid self.update_details(msg) instance = self.novaclient.get_server(self.context, instance_uuid) vm_state = getattr(instance, 'OS-EXT-STS:vm_state') if vm_state == 'stopped': self.novaclient.start_server(self.context, instance.id) msg = "Instance started: '%s'" % instance_uuid self.update_details(msg, 1.0) else: msg = ("Invalid state for Instance %(instance)s. 
Expected state: " "'STOPPED', Actual state: '%(actual_state)s'") % { 'instance': instance_uuid, 'actual_state': vm_state } self.update_details(msg, 1.0) raise exception.InstanceRecoveryFailureException( message=msg) class ConfirmInstanceActiveTask(base.MasakariTask): def __init__(self, context, novaclient, **kwargs): kwargs['requires'] = ["instance_uuid"] super(ConfirmInstanceActiveTask, self).__init__(context, novaclient, **kwargs) def execute(self, instance_uuid): def _wait_for_active(): new_instance = self.novaclient.get_server(self.context, instance_uuid) vm_state = getattr(new_instance, 'OS-EXT-STS:vm_state') if vm_state == 'active': raise loopingcall.LoopingCallDone() try: msg = "Confirming instance '%s' vm_state is ACTIVE" % instance_uuid self.update_details(msg) # add a timeout to the periodic call. timer = loopingcall.FixedIntervalWithTimeoutLoopingCall( _wait_for_active) timer.start(interval=CONF.verify_interval, timeout=CONF.wait_period_after_power_on).wait() msg = "Confirmed instance '%s' vm_state is ACTIVE" % instance_uuid self.update_details(msg, 1.0) except loopingcall.LoopingCallTimeOut: msg = "Failed to start instance %(instance)s" % { 'instance': instance_uuid } self.update_details(msg, 1.0) raise exception.InstanceRecoveryFailureException( message=msg) finally: # stop the periodic call, in case of exceptions or Timeout. timer.stop() def get_instance_recovery_flow(context, novaclient, process_what): """Constructs and returns the engine entrypoint flow. This flow will do the following: 1. Stop the instance 2. Start the instance. 3. Confirm instance is in active state. 
""" flow_name = ACTION.replace(":", "_") + "_engine" nested_flow = linear_flow.Flow(flow_name) task_dict = TASKFLOW_CONF.instance_failure_recovery_tasks instance_recovery_workflow_pre = linear_flow.Flow('pre_tasks') for plugin in base.get_recovery_flow(task_dict['pre'], context=context, novaclient=novaclient): instance_recovery_workflow_pre.add(plugin) instance_recovery_workflow_main = linear_flow.Flow('main_tasks') for plugin in base.get_recovery_flow(task_dict['main'], context=context, novaclient=novaclient): instance_recovery_workflow_main.add(plugin) instance_recovery_workflow_post = linear_flow.Flow('post_tasks') for plugin in base.get_recovery_flow(task_dict['post'], context=context, novaclient=novaclient): instance_recovery_workflow_post.add(plugin) nested_flow.add(instance_recovery_workflow_pre) nested_flow.add(instance_recovery_workflow_main) nested_flow.add(instance_recovery_workflow_post) return base.load_taskflow_into_engine(ACTION, nested_flow, process_what) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/engine/drivers/taskflow/no_op.py0000664000175100017510000000176715033036143023472 0ustar00mylesmyles# Copyright 2018 NTT DATA. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from oslo_log import log as logging from taskflow import task LOG = logging.getLogger(__name__) class Noop(task.Task): def __init__(self, context, novaclient, **kwargs): self.context = context self.novaclient = novaclient super(Noop, self).__init__() def execute(self, **kwargs): LOG.info("Custom task executed successfully..!!") return ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/engine/drivers/taskflow/process_failure.py0000664000175100017510000001237415033036143025541 0ustar00mylesmyles# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from oslo_config import cfg from oslo_log import log as logging from oslo_service import loopingcall from taskflow.patterns import linear_flow import masakari.conf from masakari.engine.drivers.taskflow import base from masakari import exception CONF = masakari.conf.CONF LOG = logging.getLogger(__name__) ACTION = "process:recovery" TASKFLOW_CONF = cfg.CONF.taskflow_driver_recovery_flows class DisableComputeNodeTask(base.MasakariTask): def __init__(self, context, novaclient, **kwargs): kwargs['requires'] = ["process_name", "host_name"] super(DisableComputeNodeTask, self).__init__(context, novaclient, **kwargs) def execute(self, process_name, host_name): msg = "Disabling compute service on host: '%s'" % host_name self.update_details(msg) if not self.novaclient.is_service_disabled(self.context, host_name, process_name): # disable compute node on given host self.novaclient.enable_disable_service(self.context, host_name, reason=CONF.process_failure.service_disable_reason) msg = "Disabled compute service on host: '%s'" % host_name self.update_details(msg, 1.0) else: msg = ("Skipping recovery for process %(process_name)s as it is " "already disabled") % {'process_name': process_name} LOG.info(msg) self.update_details(msg, 1.0) class ConfirmComputeNodeDisabledTask(base.MasakariTask): def __init__(self, context, novaclient, **kwargs): kwargs['requires'] = ["process_name", "host_name"] super(ConfirmComputeNodeDisabledTask, self).__init__(context, novaclient, **kwargs) def execute(self, process_name, host_name): def _wait_for_disable(): service_disabled = self.novaclient.is_service_disabled( self.context, host_name, process_name) if service_disabled: raise loopingcall.LoopingCallDone() try: msg = "Confirming compute service is disabled on host: '%s'" % ( host_name) self.update_details(msg) # add a timeout to the periodic call. 
timer = loopingcall.FixedIntervalWithTimeoutLoopingCall( _wait_for_disable) timer.start(interval=CONF.verify_interval, timeout=CONF.wait_period_after_service_update).wait() msg = "Confirmed compute service is disabled on host: '%s'" % ( host_name) self.update_details(msg, 1.0) except loopingcall.LoopingCallTimeOut: msg = "Failed to disable service %(process_name)s" % { 'process_name': process_name } self.update_details(msg, 1.0) raise exception.ProcessRecoveryFailureException( message=msg) finally: # stop the periodic call, in case of exceptions or Timeout. timer.stop() def get_compute_process_recovery_flow(context, novaclient, process_what): """Constructs and returns the engine entrypoint flow. This flow will do the following: 1. Disable nova-compute process 2. Confirm nova-compute process is disabled """ flow_name = ACTION.replace(":", "_") + "_engine" nested_flow = linear_flow.Flow(flow_name) task_dict = TASKFLOW_CONF.process_failure_recovery_tasks process_recovery_workflow_pre = linear_flow.Flow('pre_tasks') for plugin in base.get_recovery_flow(task_dict['pre'], context=context, novaclient=novaclient): process_recovery_workflow_pre.add(plugin) process_recovery_workflow_main = linear_flow.Flow('main_tasks') for plugin in base.get_recovery_flow(task_dict['main'], context=context, novaclient=novaclient): process_recovery_workflow_main.add(plugin) process_recovery_workflow_post = linear_flow.Flow('post_tasks') for plugin in base.get_recovery_flow(task_dict['post'], context=context, novaclient=novaclient): process_recovery_workflow_post.add(plugin) nested_flow.add(process_recovery_workflow_pre) nested_flow.add(process_recovery_workflow_main) nested_flow.add(process_recovery_workflow_post) return base.load_taskflow_into_engine(ACTION, nested_flow, process_what) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/engine/instance_events.py0000664000175100017510000000231415033036143022225 
0ustar00mylesmyles# Copyright 2017 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ VM libvirt events These are the events which needs to be processed by masakari in case of instance recovery failure. """ INSTANCE_EVENTS = { # Add QEMU guest agent events here. 'QEMU_GUEST_AGENT_ERROR': ['STOPPED_FAILED'], # Add more events and vir_domain_events here. 'LIFECYCLE': ['STOPPED_FAILED'], 'IO_ERROR': ['IO_ERROR_REPORT'] } def is_valid_event(payload): vir_domain_event_list = INSTANCE_EVENTS.get(payload.get('event')) if vir_domain_event_list and payload.get( 'vir_domain_event') in vir_domain_event_list: return True return False ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/engine/manager.py0000664000175100017510000004657515033036143020470 0ustar00mylesmyles# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
"""Handles all processes relating to notifications. The :py:class:`MasakariManager` class is a :py:class:`masakari.manager.Manager` that handles RPC calls relating to notifications. It is responsible for processing notifications and executing workflows. """ import traceback from oslo_log import log as logging import oslo_messaging as messaging from oslo_service import periodic_task from oslo_utils import timeutils import masakari.conf from masakari.engine import driver from masakari.engine import instance_events as virt_events from masakari.engine import rpcapi from masakari.engine import utils as engine_utils from masakari import exception from masakari.i18n import _ from masakari import manager from masakari import objects from masakari.objects import fields from masakari import utils CONF = masakari.conf.CONF LOG = logging.getLogger(__name__) def update_host_method(context, host_name, reserved=False): reserved_host = objects.Host.get_by_name(context, host_name) reserved_host.reserved = reserved reserved_host.save() class MasakariManager(manager.Manager): """Manages the running notifications""" RPC_API_VERSION = rpcapi.EngineAPI.RPC_API_VERSION target = messaging.Target(version=RPC_API_VERSION) def __init__(self, masakari_driver=None, *args, **kwargs): """Load configuration options""" LOG.debug("Initializing Masakari Manager.") super(MasakariManager, self).__init__(service_name="engine", *args, **kwargs) self.driver = driver.load_masakari_driver(masakari_driver) def _handle_notification_type_process(self, context, notification): notification_status = fields.NotificationStatus.FINISHED notification_event = notification.payload.get('event') process_name = notification.payload.get('process_name') exception_info = None if notification_event.upper() == 'STARTED': LOG.info("Notification type '%(type)s' received for host " "'%(host_uuid)s': '%(process_name)s' has been " "%(event)s.", {'type': notification.type, 'host_uuid': notification.source_host_uuid, 'process_name': 
process_name, 'event': notification_event}) elif notification_event.upper() == 'STOPPED': host_obj = objects.Host.get_by_uuid( context, notification.source_host_uuid) host_name = host_obj.name # Mark host on_maintenance mode as True update_data = { 'on_maintenance': True, } host_obj.update(update_data) host_obj.save() try: self.driver.execute_process_failure( context, process_name, host_name, notification.notification_uuid) except exception.SkipProcessRecoveryException: notification_status = fields.NotificationStatus.FINISHED except (exception.MasakariException, exception.ProcessRecoveryFailureException) as e: notification_status = fields.NotificationStatus.ERROR LOG.error("Failed to process notification '%(uuid)s'." " Reason: %(error)s", {"uuid": notification.notification_uuid, "error": e.message}) exception_info = e else: LOG.warning("Invalid event: %(event)s received for " "notification type: %(notification_type)s", {'event': notification_event, 'notification_type': notification.type}) notification_status = fields.NotificationStatus.IGNORED if exception_info: tb = traceback.format_exc() engine_utils.notify_about_notification_update(context, notification, action=fields.EventNotificationAction.NOTIFICATION_PROCESS, phase=fields.EventNotificationPhase.ERROR, exception=str(exception_info), tb=tb) else: engine_utils.notify_about_notification_update(context, notification, action=fields.EventNotificationAction.NOTIFICATION_PROCESS, phase=fields.EventNotificationPhase.END) return notification_status def _handle_notification_type_instance(self, context, notification): if not virt_events.is_valid_event(notification.payload): LOG.info("Notification '%(uuid)s' received with payload " "%(payload)s is ignored.", {"uuid": notification.notification_uuid, "payload": notification.payload}) return fields.NotificationStatus.IGNORED notification_status = fields.NotificationStatus.FINISHED exception_info = None try: self.driver.execute_instance_failure( context, 
notification.payload.get('instance_uuid'), notification.notification_uuid) except exception.IgnoreInstanceRecoveryException as e: notification_status = fields.NotificationStatus.IGNORED exception_info = e except exception.SkipInstanceRecoveryException: notification_status = fields.NotificationStatus.FINISHED except (exception.MasakariException, exception.InstanceRecoveryFailureException) as e: notification_status = fields.NotificationStatus.ERROR LOG.error("Failed to process notification '%(uuid)s'." " Reason: %(error)s", {"uuid": notification.notification_uuid, "error": e.message}) exception_info = e if exception_info: tb = traceback.format_exc() engine_utils.notify_about_notification_update(context, notification, action=fields.EventNotificationAction.NOTIFICATION_PROCESS, phase=fields.EventNotificationPhase.ERROR, exception=str(exception_info), tb=tb) else: engine_utils.notify_about_notification_update(context, notification, action=fields.EventNotificationAction.NOTIFICATION_PROCESS, phase=fields.EventNotificationPhase.END) return notification_status def _handle_notification_type_host(self, context, notification): host_status = notification.payload.get('host_status') notification_status = fields.NotificationStatus.FINISHED notification_event = notification.payload.get('event') exception_info = None if host_status is None: LOG.warning("Notification '%(uuid)s' ignored as host_status is " "not provided.", {'uuid': notification.notification_uuid}) notification_status = fields.NotificationStatus.IGNORED elif host_status.upper() != fields.HostStatusType.NORMAL: # NOTE(shilpasd): Avoid host recovery for host_status other than # 'NORMAL' otherwise it could lead to unsafe evacuation of # instances running on the failed source host. 
LOG.warning("Notification '%(uuid)s' ignored as host_status " "is '%(host_status)s'", {'uuid': notification.notification_uuid, 'host_status': host_status.upper()}) notification_status = fields.NotificationStatus.IGNORED elif notification_event.upper() == 'STARTED': LOG.info("Notification type '%(type)s' received for host " "'%(host_uuid)s' has been %(event)s.", {'type': notification.type, 'host_uuid': notification.source_host_uuid, 'event': notification_event}) elif notification_event.upper() == 'STOPPED': host_obj = objects.Host.get_by_uuid( context, notification.source_host_uuid) host_name = host_obj.name recovery_method = host_obj.failover_segment.recovery_method # Mark host on_maintenance mode as True update_data = { 'on_maintenance': True, } # Set reserved flag to False if this host is reserved if host_obj.reserved: update_data['reserved'] = False LOG.info("Set host %s on maintenance.", host_name) host_obj.update(update_data) host_obj.save() reserved_host_list = None if not recovery_method == ( fields.FailoverSegmentRecoveryMethod.AUTO): reserved_host_object_list = objects.HostList.get_all( context, filters={ 'failover_segment_id': host_obj.failover_segment.uuid, 'reserved': True, 'on_maintenance': False }) # Create list of host name from reserved_host_object_list reserved_host_list = [host.name for host in reserved_host_object_list] try: self.driver.execute_host_failure( context, host_name, recovery_method, notification.notification_uuid, update_host_method=update_host_method, reserved_host_list=reserved_host_list) except exception.SkipHostRecoveryException: notification_status = fields.NotificationStatus.FINISHED except (exception.HostRecoveryFailureException, exception.ReservedHostsUnavailable, exception.MasakariException) as e: notification_status = fields.NotificationStatus.ERROR LOG.error("Failed to process notification '%(uuid)s'." 
" Reason: %(error)s", {"uuid": notification.notification_uuid, "error": e.message}) exception_info = e else: LOG.warning("Invalid event: %(event)s received for " "notification type: %(type)s", {'event': notification_event, 'type': notification.type}) notification_status = fields.NotificationStatus.IGNORED if exception_info: tb = traceback.format_exc() engine_utils.notify_about_notification_update(context, notification, action=fields.EventNotificationAction.NOTIFICATION_PROCESS, phase=fields.EventNotificationPhase.ERROR, exception=str(exception_info), tb=tb) else: engine_utils.notify_about_notification_update(context, notification, action=fields.EventNotificationAction.NOTIFICATION_PROCESS, phase=fields.EventNotificationPhase.END) return notification_status def _process_notification(self, context, notification): @utils.synchronized(notification.source_host_uuid, blocking=True) def do_process_notification(notification): LOG.info('Processing notification %(notification_uuid)s of ' 'type: %(type)s', {'notification_uuid': notification.notification_uuid, 'type': notification.type}) # Get notification from db notification_db = objects.Notification.get_by_uuid(context, notification.notification_uuid) # NOTE(tpatil): To fix bug 1773132, process notification only # if the notification status is New and the current notification # from DB status is Not New to avoid recovering from failure twice if (notification.status == fields.NotificationStatus.NEW and notification_db.status != fields.NotificationStatus.NEW): LOG.warning("Processing of notification is skipped to avoid " "recovering from failure twice. 
" "Notification received is '%(uuid)s' " "and it's status is '%(new_status)s' and the " "current status of same notification in db " "is '%(old_status)s'", {"uuid": notification.notification_uuid, "new_status": notification.status, "old_status": notification_db.status}) return update_data = { 'status': fields.NotificationStatus.RUNNING, } notification.update(update_data) notification.save() if notification.type == fields.NotificationType.PROCESS: notification_status = self._handle_notification_type_process( context, notification) elif notification.type == fields.NotificationType.VM: notification_status = self._handle_notification_type_instance( context, notification) elif notification.type == fields.NotificationType.COMPUTE_HOST: notification_status = self._handle_notification_type_host( context, notification) LOG.info("Notification %(notification_uuid)s exits with " "status: %(status)s.", {'notification_uuid': notification.notification_uuid, 'status': notification_status}) update_data = { 'status': notification_status } notification.update(update_data) notification.save() engine_utils.notify_about_notification_update(context, notification, action=fields.EventNotificationAction.NOTIFICATION_PROCESS, phase=fields.EventNotificationPhase.START) do_process_notification(notification) def process_notification(self, context, notification=None): """Processes the notification""" host = objects.Host.get_by_uuid( context, notification.source_host_uuid) if not host.failover_segment.enabled: update_data = { 'status': fields.NotificationStatus.IGNORED, 'message': 'Recovery aboart: the segment HA is disabled.', } notification.update(update_data) notification.save() msg = ('Notification %(notification_uuid)s of type: %(type)s ' 'is ignored, because the failover segment is disabled.', {'notification_uuid': notification.notification_uuid, 'type': notification.type}) raise exception.FailoverSegmentDisabled(msg) self._process_notification(context, notification) 
@periodic_task.periodic_task( spacing=CONF.process_unfinished_notifications_interval) def _process_unfinished_notifications(self, context): filters = { 'status': [fields.NotificationStatus.ERROR, fields.NotificationStatus.NEW] } notifications_list = objects.NotificationList.get_all(context, filters=filters) for notification in notifications_list: if (notification.status == fields.NotificationStatus.ERROR or (notification.status == fields.NotificationStatus.NEW and timeutils.is_older_than( notification.generated_time, CONF.retry_notification_new_status_interval))): self._process_notification(context, notification) # get updated notification from db after workflow execution notification_db = objects.Notification.get_by_uuid( context, notification.notification_uuid) if notification_db.status == fields.NotificationStatus.ERROR: # update notification status as failed notification_status = fields.NotificationStatus.FAILED update_data = { 'status': notification_status } notification_db.update(update_data) notification_db.save() LOG.error( "Periodic task 'process_unfinished_notifications': " "Notification %(notification_uuid)s exits with " "status: %(status)s.", {'notification_uuid': notification.notification_uuid, 'status': notification_status}) @periodic_task.periodic_task( spacing=CONF.check_expired_notifications_interval) def _check_expired_notifications(self, context): filters = { 'status': [fields.NotificationStatus.RUNNING, fields.NotificationStatus.ERROR, fields.NotificationStatus.NEW] } notifications_list = objects.NotificationList.get_all(context, filters=filters) for notification in notifications_list: if timeutils.is_older_than( notification.generated_time, CONF.notifications_expired_interval): # update running expired notification status as failed notification_status = fields.NotificationStatus.FAILED update_data = { 'status': notification_status } notification.update(update_data) notification.save() LOG.error( "Periodic task 'check_expired_notifications': " 
"Notification %(notification_uuid)s is expired.", {'notification_uuid': notification.notification_uuid}) def get_notification_recovery_workflow_details(self, context, notification): """Retrieve recovery workflow details of the notification""" try: host_obj = objects.Host.get_by_uuid( context, notification.source_host_uuid) recovery_method = host_obj.failover_segment.recovery_method progress_details = ( self.driver.get_notification_recovery_workflow_details( context, recovery_method, notification)) notification['recovery_workflow_details'] = progress_details except Exception: msg = (_('Failed to fetch notification recovery workflow details ' 'for %s') % notification.notification_uuid) LOG.exception(msg) raise exception.MasakariException(msg) return notification ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/engine/rpcapi.py0000664000175100017510000000403515033036143020315 0ustar00mylesmyles# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import oslo_messaging as messaging import masakari.conf from masakari.objects import base as objects_base from masakari import rpc CONF = masakari.conf.CONF class EngineAPI(rpc.RPCAPI): """Client side of the engine rpc API. API version history: .. code-block:: none 1.0 - Initial version. 1.1 - Added get_notification_recovery_workflow_details method to retrieve progress details from notification driver. 
""" RPC_API_VERSION = '1.1' TOPIC = CONF.masakari_topic BINARY = 'masakari-engine' def __init__(self): super(EngineAPI, self).__init__() target = messaging.Target(topic=self.TOPIC, version=self.RPC_API_VERSION) serializer = objects_base.MasakariObjectSerializer() self.client = rpc.get_client(target, serializer=serializer) def process_notification(self, context, notification): version = '1.0' cctxt = self.client.prepare(version=version) cctxt.cast(context, 'process_notification', notification=notification) def get_notification_recovery_workflow_details(self, context, notification): version = '1.1' cctxt = self.client.prepare(version=version) return cctxt.call(context, 'get_notification_recovery_workflow_details', notification=notification) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/engine/utils.py0000664000175100017510000000446015033036143020201 0ustar00mylesmyles# Copyright (c) 2018 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
import socket from masakari.notifications.objects import base as notification_base from masakari.notifications.objects import exception as notification_exception from masakari.notifications.objects import notification as event_notification from masakari.objects import fields def _get_fault_and_priority_from_exc_and_tb(exception, tb): fault = None priority = fields.EventNotificationPriority.INFO if exception: priority = fields.EventNotificationPriority.ERROR fault = notification_exception.ExceptionPayload.from_exc_and_traceback( exception, tb) return fault, priority def notify_about_notification_update(context, notification, action, phase=None, binary='masakari-engine', exception=None, tb=None): """Send versioned notification about a notification update. :param notification: Notification object :param action: the name of the action :param phase: the phase of the action :param binary: the binary emitting the notification :param exception: the thrown exception (used in error notifications) :param tb: the traceback (used in error notifications) """ fault, priority = _get_fault_and_priority_from_exc_and_tb(exception, tb) payload = event_notification.NotificationApiPayload( notification=notification, fault=fault) engine_notification = event_notification.NotificationApiNotification( context=context, priority=priority, publisher=notification_base.NotificationPublisher( context=context, host=socket.gethostname(), binary=binary), event_type=notification_base.EventType( action=action, phase=phase), payload=payload) engine_notification.emit(context) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/exception.py0000664000175100017510000002711615033036143017575 0ustar00mylesmyles# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Masakari base exception handling. Includes decorator for re-raising Masakari-type exceptions. SHOULD include dedicated exception logging. """ import functools from http import HTTPStatus import inspect import sys from oslo_log import log as logging from oslo_utils import excutils import webob.exc from webob import util as woutil import masakari.conf from masakari.i18n import _ from masakari import safe_utils from masakari import utils LOG = logging.getLogger(__name__) CONF = masakari.conf.CONF class ConvertedException(webob.exc.WSGIHTTPException): def __init__(self, code, title="", explanation=""): self.code = int(code) # There is a strict rule about constructing status line for HTTP: # '...Status-Line, consisting of the protocol version followed by a # numeric status code and its associated textual phrase, with each # element separated by SP characters' # (http://www.faqs.org/rfcs/rfc2616.html) # 'code' and 'title' can not be empty because they correspond # to numeric status code and its associated text if title: self.title = title else: try: self.title = woutil.status_reasons[self.code] except KeyError: msg = "Improper or unknown HTTP status code used: %d" LOG.error(msg, code) self.title = woutil.status_generic_reasons[self.code // 100] self.explanation = explanation super(ConvertedException, self).__init__() def _cleanse_dict(original): """Strip all admin_password, new_pass, rescue_pass keys from a dict.""" return {k: v for k, v in original.items() if "_pass" not in k} def wrap_exception(notifier=None, get_notifier=None): """This decorator wraps a method to 
catch any exceptions that may get thrown. It also optionally sends the exception to the notification system. """ def inner(f): def wrapped(self, context, *args, **kw): # Don't store self or context in the payload, it now seems to # contain confidential information. try: return f(self, context, *args, **kw) except Exception as e: with excutils.save_and_reraise_exception(): if notifier or get_notifier: payload = dict(exception=e) wrapped_func = safe_utils.get_wrapped_function(f) call_dict = inspect.getcallargs(wrapped_func, self, context, *args, **kw) # self can't be serialized and shouldn't be in the # payload call_dict.pop('self', None) cleansed = _cleanse_dict(call_dict) payload.update({'args': cleansed}) # If f has multiple decorators, they must use # functools.wraps to ensure the name is # propagated. event_type = f.__name__ (notifier or get_notifier()).error(context, event_type, payload) return functools.wraps(f)(wrapped) return inner class MasakariException(Exception): """Base Masakari Exception To correctly use this class, inherit from it and define a 'msg_fmt' property. That msg_fmt will get printf'd with the keyword arguments provided to the constructor. 
""" msg_fmt = _("An unknown exception occurred.") code = HTTPStatus.INTERNAL_SERVER_ERROR headers = {} safe = False def __init__(self, message=None, **kwargs): self.kwargs = kwargs if 'code' not in self.kwargs: try: self.kwargs['code'] = self.code except AttributeError: pass if not message: try: message = self.msg_fmt % kwargs except Exception: exc_info = sys.exc_info() # kwargs doesn't match a variable in the message # log the issue and the kwargs LOG.exception('Exception in string format operation') for name, value in kwargs.items(): LOG.error("%s: %s" % (name, value)) # noqa if CONF.fatal_exception_format_errors: utils.reraise(*exc_info) else: # at least get the core message out if something happened message = self.msg_fmt self.message = message super(MasakariException, self).__init__(message) def format_message(self): # NOTE: use the first argument to the python Exception object # which should be our full MasakariException message, (see __init__) return self.args[0] class APIException(MasakariException): msg_fmt = _("Error while requesting %(service)s API.") def __init__(self, message=None, **kwargs): if 'service' not in kwargs: kwargs['service'] = 'unknown' super(APIException, self).__init__(message, **kwargs) class APITimeout(APIException): msg_fmt = _("Timeout while requesting %(service)s API.") class Conflict(MasakariException): msg_fmt = _("Conflict") code = HTTPStatus.CONFLICT class Invalid(MasakariException): msg_fmt = _("Bad Request - Invalid Parameters") code = HTTPStatus.BAD_REQUEST class InvalidName(Invalid): msg_fmt = _("An invalid 'name' value was provided. " "The name must be: %(reason)s") class InvalidInput(Invalid): msg_fmt = _("Invalid input received: %(reason)s") class InvalidAPIVersionString(Invalid): msg_fmt = _("API Version String %(version)s is of invalid format. 
Must " "be of format MajorNum.MinorNum.") class MalformedRequestBody(MasakariException): msg_fmt = _("Malformed message body: %(reason)s") # NOTE: NotFound should only be used when a 404 error is # appropriate to be returned class NotFound(MasakariException): msg_fmt = _("Resource could not be found.") code = HTTPStatus.NOT_FOUND class ConfigNotFound(NotFound): msg_fmt = _("Could not find config at %(path)s") class Forbidden(MasakariException): msg_fmt = _("Forbidden") code = HTTPStatus.FORBIDDEN class AdminRequired(Forbidden): msg_fmt = _("User does not have admin privileges") class PolicyNotAuthorized(Forbidden): msg_fmt = _("Policy doesn't allow %(action)s to be performed.") class PasteAppNotFound(MasakariException): msg_fmt = _("Could not load paste app '%(name)s' from %(path)s") class InvalidContentType(Invalid): msg_fmt = _("Invalid content type %(content_type)s.") class VersionNotFoundForAPIMethod(Invalid): msg_fmt = _("API version %(version)s is not supported on this method.") class InvalidGlobalAPIVersion(Invalid): msg_fmt = _("Version %(req_ver)s is not supported by the API. 
Minimum " "is %(min_ver)s and maximum is %(max_ver)s.") class ApiVersionsIntersect(Invalid): msg_fmt = _("Version of %(name) %(min_ver) %(max_ver) intersects " "with another versions.") class ValidationError(Invalid): msg_fmt = "%(detail)s" class InvalidSortKey(Invalid): msg_fmt = _("Sort key supplied was not valid.") class MarkerNotFound(NotFound): msg_fmt = _("Marker %(marker)s could not be found.") class FailoverSegmentNotFound(NotFound): msg_fmt = _("No failover segment with id %(id)s.") class HostNotFound(NotFound): msg_fmt = _("No host with id %(id)s.") class NotificationNotFound(NotFound): msg_fmt = _("No notification with id %(id)s.") class FailoverSegmentNotFoundByName(FailoverSegmentNotFound): msg_fmt = _("Failover segment with name %(segment_name)s could not " "be found.") class HostNotFoundByName(HostNotFound): msg_fmt = _("Host with name %(host_name)s could not be found.") class ComputeNotFoundByName(NotFound): msg_fmt = _("Compute service with name %(compute_name)s could not " "be found.") class VMoveNotFound(NotFound): msg_fmt = _("No vm move with id %(id)s.") class NotificationWithoutVMoves(Invalid): msg_fmt = _("This notification %(id)s without vm moves.") class FailoverSegmentExists(MasakariException): msg_fmt = _("Failover segment with name %(name)s already exists.") class HostExists(MasakariException): msg_fmt = _("Host with name %(name)s already exists.") class Unauthorized(MasakariException): msg_fmt = _("Not authorized.") code = HTTPStatus.UNAUTHORIZED class ObjectActionError(MasakariException): msg_fmt = _('Object action %(action)s failed because: %(reason)s') class OrphanedObjectError(MasakariException): msg_fmt = _('Cannot call %(method)s on orphaned %(objtype)s object') class DuplicateNotification(Invalid): msg_fmt = _('Duplicate notification received for type: %(type)s') code = HTTPStatus.CONFLICT class HostOnMaintenanceError(Invalid): msg_fmt = _('Host %(host_name)s is already under maintenance.') code = HTTPStatus.CONFLICT class 
HostRecoveryFailureException(MasakariException): msg_fmt = _('Failed to execute host recovery.') class InstanceRecoveryFailureException(MasakariException): msg_fmt = _('Failed to execute instance recovery workflow.') class SkipInstanceRecoveryException(MasakariException): msg_fmt = _('Skipping execution of instance recovery workflow.') class SkipProcessRecoveryException(MasakariException): msg_fmt = _('Skipping execution of process recovery workflow.') class SkipHostRecoveryException(MasakariException): msg_fmt = _('Skipping execution of host recovery workflow.') class ProcessRecoveryFailureException(MasakariException): msg_fmt = _('Failed to execute process recovery workflow.') class DBNotAllowed(MasakariException): msg_fmt = _('%(binary)s attempted direct database access which is ' 'not allowed by policy') class FailoverSegmentInUse(Conflict): msg_fmt = _("Failover segment %(uuid)s can't be updated as it is in-use " "to process notifications.") class HostInUse(Conflict): msg_fmt = _("Host %(uuid)s can't be updated as it is in-use to process " "notifications.") class ReservedHostsUnavailable(MasakariException): msg_fmt = _('No reserved_hosts available for evacuation.') class LockAlreadyAcquired(MasakariException): msg_fmt = _('Lock is already acquired on %(resource)s.') class IgnoreInstanceRecoveryException(MasakariException): msg_fmt = _('Instance recovery is ignored.') class HostNotFoundUnderFailoverSegment(HostNotFound): msg_fmt = _("Host '%(host_uuid)s' under failover_segment " "'%(segment_uuid)s' could not be found.") class InstanceEvacuateFailed(MasakariException): msg_fmt = _("Failed to evacuate instance %(instance_uuid)s") class FailoverSegmentDisabled(MasakariException): msg_fmt = _('Failover segment is disabled.') ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1751923813.5287101 masakari-19.1.0.dev18/masakari/ha/0000775000175100017510000000000015033036146015611 
5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/ha/__init__.py0000664000175100017510000000000015033036143017705 0ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/ha/api.py0000664000175100017510000004256315033036143016743 0ustar00mylesmyles# Copyright 2016 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import datetime import traceback from oslo_log import log as logging from oslo_utils import excutils from oslo_utils import strutils from oslo_utils import uuidutils from masakari.api import utils as api_utils from masakari.compute import nova import masakari.conf from masakari.coordination import synchronized from masakari.engine import rpcapi as engine_rpcapi from masakari import exception from masakari.i18n import _ from masakari import objects from masakari.objects import fields CONF = masakari.conf.CONF LOG = logging.getLogger(__name__) def is_failover_segment_under_recovery(segment): filters = { 'status': [fields.NotificationStatus.NEW, fields.NotificationStatus.RUNNING, fields.NotificationStatus.ERROR] } return segment.is_under_recovery(filters=filters) class FailoverSegmentAPI(object): def get_segment(self, context, segment_uuid): """Get a single failover segment with the given segment_uuid.""" if uuidutils.is_uuid_like(segment_uuid): LOG.debug("Fetching failover segment by uuid 
%s", segment_uuid) segment = objects.FailoverSegment.get_by_uuid(context, segment_uuid) else: LOG.debug("Failed to fetch failover " "segment by uuid %s", segment_uuid) raise exception.FailoverSegmentNotFound(id=segment_uuid) return segment def get_all(self, context, filters=None, sort_keys=None, sort_dirs=None, limit=None, marker=None): """Get all failover segments filtered by one of the given parameters. If there is no filter it will retrieve all segments in the system. The results will be sorted based on the list of sort keys in the 'sort_keys' parameter (first value is primary sort key, second value is secondary sort ket, etc.). For each sort key, the associated sort direction is based on the list of sort directions in the 'sort_dirs' parameter. """ LOG.debug("Searching by: %s", str(filters)) limited_segments = (objects.FailoverSegmentList. get_all(context, filters=filters, sort_keys=sort_keys, sort_dirs=sort_dirs, limit=limit, marker=marker)) return limited_segments def create_segment(self, context, segment_data): """Create segment""" segment = objects.FailoverSegment(context=context) # Populate segment object for create segment.name = segment_data.get('name') segment.description = segment_data.get('description') segment.recovery_method = segment_data.get('recovery_method') segment.service_type = segment_data.get('service_type') segment.enabled = strutils.bool_from_string( segment_data.get('enabled', True), strict=True) try: segment.create() except Exception as e: with excutils.save_and_reraise_exception(): tb = traceback.format_exc() api_utils.notify_about_segment_api(context, segment, action=fields.EventNotificationAction.SEGMENT_CREATE, phase=fields.EventNotificationPhase.ERROR, exception=e, tb=tb) return segment def update_segment(self, context, uuid, segment_data): """Update the properties of a failover segment.""" segment = objects.FailoverSegment.get_by_uuid(context, uuid) if is_failover_segment_under_recovery(segment): msg = _("Failover segment %s can't 
be updated as " "it is in-use to process notifications.") % uuid
            LOG.error(msg)
            raise exception.FailoverSegmentInUse(msg)
        try:
            segment.update(segment_data)
            segment.save()
        except Exception as e:
            # Emit an error-phase versioned notification before re-raising so
            # operators can observe the failed update.
            with excutils.save_and_reraise_exception():
                tb = traceback.format_exc()
                api_utils.notify_about_segment_api(context, segment,
                    action=fields.EventNotificationAction.SEGMENT_UPDATE,
                    phase=fields.EventNotificationPhase.ERROR,
                    exception=e, tb=tb)

        return segment

    def delete_segment(self, context, uuid):
        """Deletes the segment.

        Deletion is refused (FailoverSegmentInUse) while the segment is
        still processing notifications.
        """
        segment = objects.FailoverSegment.get_by_uuid(context, uuid)
        if is_failover_segment_under_recovery(segment):
            msg = _("Failover segment (%s) can't be deleted as "
                    "it is in-use to process notifications.") % uuid
            LOG.error(msg)
            raise exception.FailoverSegmentInUse(msg)
        try:
            segment.destroy()
        except Exception as e:
            # Notify about the failed delete, then re-raise unchanged.
            with excutils.save_and_reraise_exception():
                tb = traceback.format_exc()
                api_utils.notify_about_segment_api(context, segment,
                    action=fields.EventNotificationAction.SEGMENT_DELETE,
                    phase=fields.EventNotificationPhase.ERROR,
                    exception=e, tb=tb)


class HostAPI(object):
    """The Host API to manage hosts"""

    def _is_valid_host_name(self, context, name):
        # Delegates validation to nova: raises if no compute service with
        # this name exists (find_compute_service is expected to raise on
        # a miss -- the return value is not used).
        novaclient = nova.API()
        novaclient.find_compute_service(context, name)

    def get_host(self, context, segment_uuid, host_uuid):
        """Get a host by id

        :raises: HostNotFound if ``host_uuid`` is not a UUID-like string.
        """
        if uuidutils.is_uuid_like(host_uuid):
            LOG.debug("Fetching host by uuid %s", host_uuid)
            host = objects.Host.get_by_uuid(
                context, host_uuid, segment_uuid=segment_uuid)
        else:
            LOG.debug("Failed to fetch host by uuid %s", host_uuid)
            raise exception.HostNotFound(id=host_uuid)

        return host

    def get_all(self, context, filters=None, sort_keys=None,
                sort_dirs=None, limit=None, marker=None):
        """Get all hosts by filter"""
        LOG.debug("Searching by: %s", str(filters))
        limited_hosts = objects.HostList.get_all(context,
                                                 filters=filters,
                                                 sort_keys=sort_keys,
                                                 sort_dirs=sort_dirs,
                                                 limit=limit,
                                                 marker=marker)

        return limited_hosts

    def create_host(self, context, segment_uuid, host_data):
        """Create host"""
        segment = objects.FailoverSegment.get_by_uuid(context, segment_uuid)
        host = objects.Host(context=context)

        # Populate host object for create
        host.name = host_data.get('name')
        host.failover_segment = segment
        host.type = host_data.get('type')
        host.control_attributes = host_data.get('control_attributes')
        # strict=True: reject anything that is not a recognised boolean
        # string rather than silently treating it as False.
        host.on_maintenance = strutils.bool_from_string(
            host_data.get('on_maintenance', False), strict=True)
        host.reserved = strutils.bool_from_string(
            host_data.get('reserved', False), strict=True)

        self._is_valid_host_name(context, host.name)

        try:
            host.create()
        except Exception as e:
            with excutils.save_and_reraise_exception():
                tb = traceback.format_exc()
                api_utils.notify_about_host_api(context, host,
                    action=fields.EventNotificationAction.HOST_CREATE,
                    phase=fields.EventNotificationPhase.ERROR,
                    exception=e, tb=tb)

        return host

    def update_host(self, context, segment_uuid, id, host_data):
        """Update the host

        :raises: HostInUse while the owning segment is processing
            notifications.
        """
        host = objects.Host.get_by_uuid(
            context, id, segment_uuid=segment_uuid)

        if is_failover_segment_under_recovery(host.failover_segment):
            msg = _("Host %s can't be updated as "
                    "it is in-use to process notifications.") % host.uuid
            LOG.error(msg)
            raise exception.HostInUse(msg)

        if 'name' in host_data:
            self._is_valid_host_name(context, host_data.get('name'))

        if 'on_maintenance' in host_data:
            host_data['on_maintenance'] = strutils.bool_from_string(
                host_data['on_maintenance'], strict=True)
        if 'reserved' in host_data:
            host_data['reserved'] = strutils.bool_from_string(
                host_data['reserved'], strict=True)

        try:
            host.update(host_data)
            host.save()
        except Exception as e:
            with excutils.save_and_reraise_exception():
                tb = traceback.format_exc()
                api_utils.notify_about_host_api(context, host,
                    action=fields.EventNotificationAction.HOST_UPDATE,
                    phase=fields.EventNotificationPhase.ERROR,
                    exception=e, tb=tb)

        return host

    def delete_host(self, context, segment_uuid, id):
        """Delete the host"""
        host = objects.Host.get_by_uuid(context, id,
                                        segment_uuid=segment_uuid)
        if is_failover_segment_under_recovery(host.failover_segment):
            msg = _("Host %s can't be deleted as "
                    "it is in-use to process notifications.") % host.uuid
            LOG.error(msg)
            raise exception.HostInUse(msg)

        try:
            host.destroy()
        except Exception as e:
            with excutils.save_and_reraise_exception():
                tb = traceback.format_exc()
                api_utils.notify_about_host_api(context, host,
                    action=fields.EventNotificationAction.HOST_DELETE,
                    phase=fields.EventNotificationPhase.ERROR,
                    exception=e, tb=tb)


class NotificationAPI(object):
    """The notifications API: validates and persists incoming notifications
    and hands them to the engine over RPC for recovery processing.
    """

    def __init__(self):
        self.engine_rpcapi = engine_rpcapi.EngineAPI()

    @staticmethod
    def _is_duplicate_notification(context, notification):
        """Return True if an equal-payload notification of the same type
        from the same host was generated within the configured
        duplicate-detection window.
        """
        # Get all the notifications by filters
        filters = {
            'type': notification.type,
            'source_host_uuid': notification.source_host_uuid,
            'generated-since': (notification.generated_time -
                                datetime.timedelta(
                seconds=CONF.duplicate_notification_detection_interval))
        }
        notifications_list = objects.NotificationList.get_all(context,
                                                              filters=filters)
        for db_notification in notifications_list:
            # if payload is same notification should be considered as
            # duplicate
            if db_notification.payload == notification.payload:
                return True

        return False

    def _create_notification(self, context, notification_data):
        """Validate, persist and dispatch a single notification.

        :raises: HostOnMaintenanceError if the source host is under
            maintenance; DuplicateNotification if an equivalent recent
            notification already exists.
        """
        # Check whether host from which the notification came is already
        # present in failover segment or not
        host_name = notification_data.get('hostname')
        host_object = objects.Host.get_by_name(context, host_name)
        host_on_maintenance = host_object.on_maintenance

        if host_on_maintenance:
            message = (_("Notification received from host %(host)s of type "
                         "'%(type)s' is ignored as the host is already under "
                         "maintenance.") % {
                'host': host_name,
                'type': notification_data.get('type')
            })
            raise exception.HostOnMaintenanceError(message=message)

        notification = objects.Notification(context=context)

        # Populate notification object for create
        notification.type = notification_data.get('type')
        notification.generated_time = notification_data.get('generated_time')
        notification.source_host_uuid = host_object.uuid
        notification.payload = notification_data.get('payload')
        notification.status = fields.NotificationStatus.NEW
        segment = host_object.failover_segment
        notification.failover_segment_uuid = segment.uuid

        if self._is_duplicate_notification(context, notification):
            message = (_("Notification received from host %(host)s of "
                         "type %(type)s is duplicate.") %
                       {'host': host_name, 'type': notification.type})
            raise exception.DuplicateNotification(message=message)

        try:
            notification.create()
            # Hand off to the engine asynchronously for recovery processing.
            self.engine_rpcapi.process_notification(context, notification)
        except Exception as e:
            with excutils.save_and_reraise_exception():
                tb = traceback.format_exc()
                api_utils.notify_about_notification_api(
                    context, notification,
                    action=fields.EventNotificationAction.NOTIFICATION_CREATE,
                    phase=fields.EventNotificationPhase.ERROR,
                    exception=e, tb=tb)

        return notification

    # Serialize per source hostname so concurrent COMPUTE_HOST notifications
    # for the same host cannot race past the duplicate check.
    @synchronized('create_host_notification-{notification_data[hostname]}')
    def _create_host_type_notification(self, context, notification_data):
        return self._create_notification(context, notification_data)

    def create_notification(self, context, notification_data):
        """Create notification"""
        # COMPUTE_HOST notifications go through the lock-protected path;
        # everything else is created directly.
        create_notification_function = '_create_notification'

        if notification_data.get('type') == \
                fields.NotificationType.COMPUTE_HOST:
            create_notification_function = '_create_host_type_notification'

        return getattr(self, create_notification_function)(context,
                                                           notification_data)

    def get_all(self, context, filters=None, sort_keys=None,
                sort_dirs=None, limit=None, marker=None):
        """Get all notifications filtered by one of the given parameters.

        If there is no filter it will retrieve all notifications in the
        system.

        The results will be sorted based on the list of sort keys in the
        'sort_keys' parameter (first value is primary sort key, second value
        is secondary sort key, etc.). For each sort key, the associated sort
        direction is based on the list of sort directions in the 'sort_dirs'
        parameter.
        """
        LOG.debug("Searching by: %s", str(filters))

        limited_notifications = (objects.NotificationList.
                                 get_all(context, filters, sort_keys,
                                         sort_dirs, limit, marker))

        return limited_notifications

    def get_notification(self, context, notification_uuid):
        """Get a single notification with the given notification_uuid."""
        if uuidutils.is_uuid_like(notification_uuid):
            LOG.debug("Fetching notification by uuid %s", notification_uuid)

            notification = objects.Notification.get_by_uuid(context,
                                                            notification_uuid)
        else:
            LOG.debug("Failed to fetch notification by "
                      "uuid %s", notification_uuid)
            raise exception.NotificationNotFound(id=notification_uuid)

        return notification

    def get_notification_recovery_workflow_details(self, context,
                                                   notification_uuid):
        """Get recovery workflow details details of the notification"""
        notification = self.get_notification(context, notification_uuid)

        LOG.debug("Fetching recovery workflow details of a notification %s ",
                  notification_uuid)
        # Synchronous RPC call: the engine enriches the notification with
        # its recovery workflow progress details.
        notification = (self.engine_rpcapi.
                        get_notification_recovery_workflow_details(
                            context, notification))
        return notification


class VMoveAPI(object):
    """The vmoves API to manage vmoves"""

    def _is_valid_notification(self, context, notification_uuid):
        # Only COMPUTE_HOST notifications produce VM moves; any other type
        # is rejected up front.
        notification = objects.Notification.get_by_uuid(
            context, notification_uuid)
        if notification.type != fields.NotificationType.COMPUTE_HOST:
            raise exception.NotificationWithoutVMoves(id=notification_uuid)

    def get_all(self, context, notification_uuid, filters=None,
                sort_keys=None, sort_dirs=None, limit=None, marker=None):
        """Get all vmoves by filters"""
        self._is_valid_notification(context, notification_uuid)
        # NOTE(review): this mutates the caller-supplied 'filters' dict and
        # would raise TypeError if the default None were ever used — looks
        # like callers always pass a dict; confirm against the API layer.
        filters['notification_uuid'] = notification_uuid
        vmoves = objects.VMoveList.get_all(context, filters, sort_keys,
                                           sort_dirs, limit, marker)
        return vmoves

    def get_vmove(self, context, notification_uuid, vmove_uuid):
        """Get one vmove."""
        self._is_valid_notification(context, notification_uuid)
        if uuidutils.is_uuid_like(vmove_uuid):
            LOG.debug("Fetching vmove by uuid %s", vmove_uuid)
vmove = objects.VMove.get_by_uuid(context, vmove_uuid) else: LOG.debug("Failed to fetch vmove by uuid %s", vmove_uuid) raise exception.VMoveNotFound(id=vmove_uuid) return vmove ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1751923813.5287101 masakari-19.1.0.dev18/masakari/hacking/0000775000175100017510000000000015033036146016625 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/hacking/__init__.py0000664000175100017510000000000015033036143020721 0ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/hacking/checks.py0000664000175100017510000003725215033036143020445 0ustar00mylesmyles# Copyright (c) 2016, NTT Data # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import re from hacking import core """ Guidelines for writing new hacking checks - Use only for Masakari specific tests. OpenStack general tests should be submitted to the common 'hacking' module. - Pick numbers in the range M3xx. Find the current test with the highest allocated number and then pick the next value. - Keep the test method code in the source file ordered based on the M3xx value. 
- List the new rule in the top level HACKING.rst file - Add test cases for each new rule to masakari/tests/unit/test_hacking.py """ UNDERSCORE_IMPORT_FILES = [] session_check = re.compile(r"\w*def [a-zA-Z0-9].*[(].*session.*[)]") cfg_re = re.compile(r".*\scfg\.") cfg_opt_re = re.compile(r".*[\s\[]cfg\.[a-zA-Z]*Opt\(") rule_default_re = re.compile(r".*RuleDefault\(") policy_enforce_re = re.compile(r".*_ENFORCER\.enforce\(") asse_trueinst_re = re.compile( r"(.)*assertTrue\(isinstance\((\w|\.|\'|\"|\[|\])+, " r"(\w|\.|\'|\"|\[|\])+\)\)") asse_equal_type_re = re.compile( r"(.)*assertEqual\(type\((\w|\.|\'|\"|\[|\])+\), " r"(\w|\.|\'|\"|\[|\])+\)") asse_equal_in_end_with_true_or_false_re = re.compile( r"assertEqual\("r"(\w|[][.'\"])+ in (\w|[][.'\", ])+, (True|False)\)") asse_equal_in_start_with_true_or_false_re = re.compile( r"assertEqual\("r"(True|False), (\w|[][.'\"])+ in (\w|[][.'\", ])+\)") asse_equal_end_with_none_re = re.compile( r"assertEqual\(.*?,\s+None\)$") asse_equal_start_with_none_re = re.compile( r"assertEqual\(None,") # NOTE(abhishekk): Next two regexes weren't united to one for more readability. # asse_true_false_with_in_or_not_in regex checks # assertTrue/False(A in B) cases where B argument has no spaces # asse_true_false_with_in_or_not_in_spaces regex checks cases # where B argument has spaces and starts/ends with [, ', ". # For example: [1, 2, 3], "some string", 'another string'. # We have to separate these regexes to escape a false positives # results. B argument should have spaces only if it starts # with [, ", '. Otherwise checking of string # "assertFalse(A in B and C in D)" will be false positives. # In this case B argument is "B and C in D". asse_true_false_with_in_or_not_in = re.compile( r"assert(True|False)\("r"(\w|[][.'\"])+( not)? in (\w|[][.'\",])" r"+(, .*)?\)") asse_true_false_with_in_or_not_in_spaces = re.compile( r"assert(True|False)"r"\((\w|[][.'\"])+( not)? 
in [\[|'|\"](\w|" r"[][.'\", ])+[\[|'|\"](, .*)?\)") asse_raises_regexp = re.compile(r"assertRaisesRegexp\(") conf_attribute_set_re = re.compile(r"CONF\.[a-z0-9_.]+\s*=\s*\w") translated_log = re.compile( r"(.)*LOG\.(audit|error|info|critical|exception)" r"\(\s*_\(\s*('|\")") mutable_default_args = re.compile(r"^\s*def .+\((.+=\{\}|.+=\[\])") string_translation = re.compile(r"[^_]*_\(\s*('|\")") underscore_import_check = re.compile(r"(.)*import _(.)*") import_translation_for_log_or_exception = re.compile( r"(.)*(from\smasakari.i18n\simport)\s_") # We need this for cases where they have created their own _ function. custom_underscore_check = re.compile(r"(.)*_\s*=\s*(.)*") dict_constructor_with_list_copy_re = re.compile(r".*\bdict\((\[)?(\(|\[)") http_not_implemented_re = re.compile(r"raise .*HTTPNotImplemented\(") spawn_re = re.compile( r".*(eventlet|greenthread)\.(?Pspawn(_n)?)\(.*\)") contextlib_nested = re.compile(r"^with (contextlib\.)?nested\(") doubled_words_re = re.compile( r"\b(then?|[iao]n|i[fst]|but|f?or|at|and|[dt]o)\s+\1\b") yield_not_followed_by_space = re.compile(r"^\s*yield(?:\(|{|\[|\"|').*$") _all_log_levels = {'critical', 'error', 'exception', 'info', 'warning', 'debug'} _all_hints = {'_', '_LE', '_LI', '_LW', '_LC'} log_translation_re = re.compile( r".*LOG\.(%(levels)s)\(\s*(%(hints)s)\(" % { 'levels': '|'.join(_all_log_levels), 'hints': '|'.join(_all_hints), }) @core.flake8ext def no_db_session_in_public_api(logical_line, filename): if "db/api.py" in filename: if session_check.match(logical_line): yield (0, "M301: public db api methods may not accept" " session") @core.flake8ext def use_timeutils_utcnow(logical_line, filename): # tools are OK to use the standard datetime module if "/tools/" in filename: return msg = ("M302: timeutils.utcnow() must be used instead of " "datetime.%s()") datetime_funcs = ['now', 'utcnow'] for f in datetime_funcs: pos = logical_line.find('datetime.%s' % f) if pos != -1: yield (pos, msg % f) @core.flake8ext def 
capital_cfg_help(logical_line, tokens):
    """M303: config option help strings should be capitalized."""
    msg = "M303: capitalize help string"

    if cfg_re.match(logical_line):
        for t in range(len(tokens)):
            if tokens[t][1] == "help":
                # tokens[t + 2] is the help string literal; index 1 of its
                # text is the first character after the opening quote.
                txt = tokens[t + 2][1]
                if len(txt) > 1 and txt[1].islower():
                    yield (0, msg)


@core.flake8ext
def assert_true_instance(logical_line):
    """Check for assertTrue(isinstance(a, b)) sentences

    M305
    """
    if asse_trueinst_re.match(logical_line):
        yield (0, "M305: assertTrue(isinstance(a, b)) sentences "
               "not allowed")


@core.flake8ext
def assert_equal_type(logical_line):
    """Check for assertEqual(type(A), B) sentences

    M306
    """
    if asse_equal_type_re.match(logical_line):
        yield (0, "M306: assertEqual(type(A), B) sentences not allowed")


@core.flake8ext
def no_translate_logs(logical_line):
    """Check for 'LOG.*(_*("'

    OpenStack no longer supports log translation, so we shouldn't
    translate logs.

    * This check assumes that 'LOG' is a logger.

    M308
    """
    if log_translation_re.match(logical_line):
        yield (0, "M308: Log messages should not be translated")


@core.flake8ext
def no_import_translation_in_tests(logical_line, filename):
    """Check for 'from masakari.i18n import _'

    M309
    """
    if 'masakari/tests/' in filename:
        res = import_translation_for_log_or_exception.match(logical_line)
        if res:
            yield (0, "M309 Don't import translation in tests")


@core.flake8ext
def no_setting_conf_directly_in_tests(logical_line, filename):
    """Check for setting CONF.* attributes directly in tests

    The value can leak out of tests affecting how subsequent tests run.
    Using self.flags(option=value) is the preferred method to temporarily
    set config options in tests.

    M310
    """
    if 'masakari/tests/' in filename:
        res = conf_attribute_set_re.match(logical_line)
        if res:
            yield (0, "M310: Setting CONF.* attributes directly in "
                   "tests is forbidden. Use self.flags(option=value) "
                   "instead")


@core.flake8ext
def no_mutable_default_args(logical_line):
    """M315: method default arguments must not be mutable ({} or [])."""
    msg = "M315: Method's default argument shouldn't be mutable!"
    if mutable_default_args.match(logical_line):
        yield (0, msg)


@core.flake8ext
def check_explicit_underscore_import(logical_line, filename):
    """Check for explicit import of the _ function

    We need to ensure that any files that are using the _() function
    to translate logs are explicitly importing the _ function.  We
    can't trust unit test to catch whether the import has been
    added so we need to check for it here.
    """
    # Build a list of the files that have _ imported.  No further
    # checking needed once it is found.
    if filename in UNDERSCORE_IMPORT_FILES:
        pass
    elif (underscore_import_check.match(logical_line) or
          custom_underscore_check.match(logical_line)):
        UNDERSCORE_IMPORT_FILES.append(filename)
    elif (translated_log.match(logical_line) or
          string_translation.match(logical_line)):
        yield (0, "M316: Found use of _() without explicit "
               "import of _ !")


@core.flake8ext
def use_jsonutils(logical_line, filename):
    """M317: use oslo jsonutils instead of the stdlib json module."""
    # tools are OK to use the standard json module
    if "/tools/" in filename:
        return

    msg = "M317: jsonutils.%(fun)s must be used instead of json.%(fun)s"

    if "json." in logical_line:
        json_funcs = ['dumps(', 'dump(', 'loads(', 'load(']
        for f in json_funcs:
            pos = logical_line.find('json.%s' % f)
            if pos != -1:
                yield (pos, msg % {'fun': f[:-1]})


@core.flake8ext
def assert_true_or_false_with_in(logical_line):
    """Check for assertTrue/False(A in B), assertTrue/False(A not in B),
    assertTrue/False(A in B, message) or assertTrue/False(A not in B, message)
    sentences.
M318 """ res = (asse_true_false_with_in_or_not_in.search(logical_line) or asse_true_false_with_in_or_not_in_spaces.search(logical_line)) if res: yield (0, "M318: Use assertIn/NotIn(A, B) rather than " "assertTrue/False(A in/not in B) when checking collection " "contents.") @core.flake8ext def assert_raises_regexp(logical_line): """Check for usage of deprecated assertRaisesRegexp M319 """ res = asse_raises_regexp.search(logical_line) if res: yield (0, "M319: assertRaisesRegex must be used instead " "of assertRaisesRegexp") @core.flake8ext def dict_constructor_with_list_copy(logical_line): msg = ("M320: Must use a dict comprehension instead of a dict " "constructor with a sequence of key-value pairs.") if dict_constructor_with_list_copy_re.match(logical_line): yield (0, msg) @core.flake8ext def assert_equal_in(logical_line): """Check for assertEqual(A in B, True), assertEqual(True, A in B), assertEqual(A in B, False) or assertEqual(False, A in B) sentences M321 """ res = (asse_equal_in_start_with_true_or_false_re.search(logical_line) or asse_equal_in_end_with_true_or_false_re.search(logical_line)) if res: yield (0, "M321: Use assertIn/NotIn(A, B) rather than " "assertEqual(A in B, True/False) when checking collection " "contents.") @core.flake8ext def check_greenthread_spawns(logical_line, filename): """Check for use of greenthread.spawn(), greenthread.spawn_n(), eventlet.spawn(), and eventlet.spawn_n() M322 """ msg = ("M322: Use masakari.utils.%(spawn)s() rather than " "greenthread.%(spawn)s() and eventlet.%(spawn)s()") if "masakari/utils.py" in filename or "masakari/tests/" in filename: return match = re.match(spawn_re, logical_line) if match: yield (0, msg % {'spawn': match.group('spawn_part')}) @core.flake8ext def check_no_contextlib_nested(logical_line, filename): msg = ("M323: contextlib.nested is deprecated. With Python 2.7" "and later the with-statement supports multiple nested objects. 
" "See https://docs.python.org/2/library/contextlib.html" "#contextlib.nested for more information. masakari.test.nested() " "is an alternative as well.") if contextlib_nested.match(logical_line): yield (0, msg) @core.flake8ext def check_config_option_in_central_place(logical_line, filename): msg = ("M324: Config options should be in the central location " "'/masakari/conf/*'. Do not declare new config options outside " "of that folder.") # That's the correct location if "masakari/conf/" in filename: return # (pooja_jadhav) All config options (with exceptions that are clarified # in the list below) were moved to the central place. List below is for # all options that were impossible to move without doing a major impact # on code. Add full path to a module or folder. conf_exceptions = [ # CLI opts are allowed to be outside of masakari/conf directory 'masakari/cmd/manage.py', ] if any(f in filename for f in conf_exceptions): return if cfg_opt_re.match(logical_line): yield (0, msg) @core.flake8ext def check_doubled_words(physical_line, filename): """Check for the common doubled-word typos M325 """ msg = ("M325: Doubled word '%(word)s' typo found") match = re.search(doubled_words_re, physical_line) if match: return (0, msg % {'word': match.group(1)}) @core.flake8ext def check_python3_no_iteritems(logical_line): msg = ("M326: Use dict.items() instead of dict.iteritems().") if re.search(r".*\.iteritems\(\)", logical_line): yield (0, msg) @core.flake8ext def check_python3_no_iterkeys(logical_line): msg = ("M327: Use 'for key in dict' instead of 'for key in " "dict.iterkeys()'.") if re.search(r".*\.iterkeys\(\)", logical_line): yield (0, msg) @core.flake8ext def check_python3_no_itervalues(logical_line): msg = ("M328: Use dict.values() instead of dict.itervalues().") if re.search(r".*\.itervalues\(\)", logical_line): yield (0, msg) @core.flake8ext def no_os_popen(logical_line): """Disallow 'os.popen(' Deprecated library function os.popen() Replace it using subprocess 
https://bugs.launchpad.net/tempest/+bug/1529836

    M329
    """
    if 'os.popen(' in logical_line:
        yield (0, 'M329 Deprecated library function os.popen(). '
               'Replace it using subprocess module. ')


@core.flake8ext
def no_log_warn(logical_line):
    """Disallow 'LOG.warn('

    Deprecated LOG.warn(), instead use LOG.warning
    https://bugs.launchpad.net/senlin/+bug/1508442

    M331
    """
    msg = ("M331: LOG.warn is deprecated, please use LOG.warning!")
    if "LOG.warn(" in logical_line:
        yield (0, msg)


@core.flake8ext
def yield_followed_by_space(logical_line):
    """Yield should be followed by a space.

    Yield should be followed by a space to clarify that yield is
    not a function. Adding a space may force the developer to rethink
    if there are unnecessary parentheses in the written code.

    Not correct: yield(x), yield(a, b)
    Correct: yield x, yield (a, b), yield a, b

    M332
    """
    if yield_not_followed_by_space.match(logical_line):
        yield (0, "M332: Yield keyword should be followed by a space.")


@core.flake8ext
def check_policy_registration_in_central_place(logical_line, filename):
    """M333: policy rules must be registered under masakari/policies/."""
    msg = ('M333: Policy registration should be in the central location '
           '"/masakari/policies/*".')
    # This is where registration should happen
    if "masakari/policies/" in filename:
        return
    # A couple of policy tests register rules
    if "masakari/tests/unit/test_policy.py" in filename:
        return

    if rule_default_re.match(logical_line):
        yield (0, msg)


@core.flake8ext
def check_policy_enforce(logical_line, filename):
    """Look for uses of masakari.policy._ENFORCER.enforce()

    Now that policy defaults are registered in code the _ENFORCER.authorize
    method should be used. That ensures that only registered policies are
    used. Uses of _ENFORCER.enforce could allow unregistered policies to be
    used, so this check looks for uses of that method.

    M334
    """
    msg = ('M334: masakari.policy._ENFORCER.enforce() should not be used. '
           'Use the authorize() method instead.')

    if policy_enforce_re.match(logical_line):
        yield (0, msg)
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0
masakari-19.1.0.dev18/masakari/i18n.py0000664000175100017510000000202115033036143016342 0ustar00mylesmyles# Copyright 2016 NTT DATA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""oslo.i18n integration module.

See https://docs.openstack.org/oslo.i18n/latest/user/usage.html.

"""

import oslo_i18n

DOMAIN = 'masakari'

_translators = oslo_i18n.TranslatorFactory(domain=DOMAIN)

# The primary translation function using the well-known name "_"
_ = _translators.primary


def translate(value, user_locale):
    """Translate a translatable message for the given user locale."""
    return oslo_i18n.translate(value, user_locale)


def get_available_languages():
    """Return the languages for which masakari translations exist."""
    return oslo_i18n.get_available_languages(DOMAIN)
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0
masakari-19.1.0.dev18/masakari/manager.py0000664000175100017510000000673215033036143017212 0ustar00mylesmyles# Copyright 2016 NTT DATA
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""Base Manager class.

Managers are responsible for a certain aspect of the system.  It is a logical
grouping of code relating to a portion of the system.  In general other
components should be using the manager to make changes to the components that
it is responsible for.

We have adopted a basic strategy of Smart managers and dumb data, which means
rather than attaching methods to data objects, components should call manager
methods that act on the data.

Methods on managers that can be executed locally should be called directly. If
a particular method must execute on a remote host, this should be done via rpc
to the service that wraps the manager.

Managers should be responsible for most of the db access, and
non-implementation specific data.  Anything implementation specific that can't
be generalized should be done by the Driver.

Managers will often provide methods for initial setup of a host or periodic
tasks to a wrapping service.

This module provides Manager, a base class for managers.

"""

from oslo_service import periodic_task

import masakari.conf


CONF = masakari.conf.CONF


class PeriodicTasks(periodic_task.PeriodicTasks):
    # Thin adapter that wires the masakari CONF object into
    # oslo_service's periodic task machinery.
    def __init__(self):
        super(PeriodicTasks, self).__init__(CONF)


class Manager(PeriodicTasks):
    # Base class for all masakari managers; subclasses override the
    # lifecycle hooks below as needed.

    def __init__(self, host=None, service_name='undefined'):
        # Default the host identity to the configured CONF.host.
        if not host:
            host = CONF.host
        self.host = host
        self.service_name = service_name
        super(Manager, self).__init__()

    def periodic_tasks(self, context, raise_on_error=False):
        """Tasks to be run at a periodic interval."""
        return self.run_periodic_tasks(context, raise_on_error=raise_on_error)

    def init_host(self):
        """Hook to do additional manager initialization when one requests
        the service be started.  This is called before any service record
        is created.

        Child classes should override this method.
        """
        pass

    def cleanup_host(self):
        """Hook to do cleanup work when the service shuts down.

        Child classes should override this method.
        """
        pass

    def pre_start_hook(self):
        """Hook to provide the manager the ability to do additional
        start-up work before any RPC queues/consumers are created. This is
        called after other initialization has succeeded and a service
        record is created.

        Child classes should override this method.
        """
        pass

    def post_start_hook(self):
        """Hook to provide the manager the ability to do additional
        start-up work immediately after a service creates RPC consumers
        and starts 'running'.

        Child classes should override this method.
        """
        pass

    def reset(self):
        """Hook called on SIGHUP to signal the manager to re-read any
        dynamic configuration or do any reconfiguration tasks.
        """
        pass
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1751923813.5287101
masakari-19.1.0.dev18/masakari/notifications/0000775000175100017510000000000015033036146020072 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0
masakari-19.1.0.dev18/masakari/notifications/__init__.py0000664000175100017510000000000015033036143022166 0ustar00mylesmyles././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1751923813.5287101
masakari-19.1.0.dev18/masakari/notifications/objects/0000775000175100017510000000000015033036146021523 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0
masakari-19.1.0.dev18/masakari/notifications/objects/__init__.py0000664000175100017510000000000015033036143023617 0ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0
masakari-19.1.0.dev18/masakari/notifications/objects/base.py0000664000175100017510000001435215033036143023011 0ustar00mylesmyles# Copyright (c) 2018 NTT DATA
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from masakari.objects import base
from masakari.objects import fields
from masakari import rpc


@base.MasakariObjectRegistry.register_if(False)
class NotificationObject(base.MasakariObject):
    """Base class for every notification related versioned object."""
    # Version 1.0: Initial version
    VERSION = '1.0'

    def __init__(self, **kwargs):
        super(NotificationObject, self).__init__(**kwargs)
        # The notification objects are created on the fly when masakari emits
        # the notification. This causes that every object shows every field as
        # changed. We don't want to send this meaningless information so we
        # reset the object after creation.
        self.obj_reset_changes(recursive=False)


@base.MasakariObjectRegistry.register_notification
class EventType(NotificationObject):
    """Versioned object describing a notification's event type.

    Combines an action (e.g. 'segment.create') with an optional phase
    (start/end/error) into the dotted wire-format event-type string.
    """
    # Version 1.0: Initial version
    VERSION = '1.0'
    fields = {
        'action': fields.EventNotificationActionField(nullable=False),
        'phase': fields.EventNotificationPhaseField(nullable=True),
    }

    def to_notification_event_type_field(self):
        """Serialize the object to the wire format.

        :returns: '<action>' or '<action>.<phase>' when phase is set.
        """
        s = '%s' % (self.action)
        if self.obj_attr_is_set('phase'):
            s += '.%s' % self.phase
        return s


@base.MasakariObjectRegistry.register_if(False)
class NotificationPayloadBase(NotificationObject):
    """Base class for the payload of versioned notifications."""
    # SCHEMA defines how to populate the payload fields. It is a dictionary
    # where every key/value pair has the following format:
    #   <payload_field_name>: (<source_kwarg_name>, <source_field_name>)
    # The <payload_field_name> is the name where the data will be stored in
    # the payload object; it has to be declared as a field of the payload.
    # The <source_kwarg_name> refers to the name of the parameter passed as
    # a kwarg to the payload's populate_schema() call and that object is
    # used as the source of the data. The <source_field_name> shall be a
    # valid field of the passed source object.
    # The SCHEMA needs to be applied with the populate_schema() call before
    # the notification can be emitted. The source objects themselves are not
    # part of the payload's internal or external representation.
    # Payload fields that are not set by the SCHEMA can be filled in the same
    # way as in any versioned object.
    SCHEMA = {}
    # Version 1.0: Initial version
    VERSION = '1.0'

    def __init__(self, **kwargs):
        super(NotificationPayloadBase, self).__init__(**kwargs)
        # Schema-less payloads are considered populated from the start.
        self.populated = not self.SCHEMA

    def populate_schema(self, **kwargs):
        """Populate the object based on the SCHEMA and the source objects

        :param kwargs: A dict contains the source object at the key defined in
                       the SCHEMA
        """
        for key, (obj, field) in self.SCHEMA.items():
            source = kwargs[obj]
            # Only copy fields the source object actually has set.
            if source.obj_attr_is_set(field):
                setattr(self, key, getattr(source, field))
        self.populated = True
        # the schema population will create changed fields but we don't need
        # this information in the notification
        self.obj_reset_changes(recursive=False)


@base.MasakariObjectRegistry.register_notification
class NotificationPublisher(NotificationObject):
    """Identifies the service emitting a notification (host + binary)."""
    # Version 1.0: Initial version
    VERSION = '1.0'
    fields = {
        'host': fields.StringField(nullable=False),
        'binary': fields.StringField(nullable=False),
    }

    @classmethod
    def from_service_obj(cls, service):
        # Build a publisher directly from a service object's identity.
        return cls(host=service.host, binary=service.binary)


@base.MasakariObjectRegistry.register_if(False)
class NotificationBase(NotificationObject):
    """Base class for versioned notifications.

    Every subclass shall define a 'payload' field.
    """
    # Version 1.0: Initial version
    VERSION = '1.0'
    fields = {
        'priority': fields.EventNotificationPriorityField(),
        'event_type': fields.ObjectField('EventType'),
        'publisher': fields.ObjectField('NotificationPublisher'),
    }

    def _emit(self, context, event_type, publisher_id, payload):
        # Dispatch through the oslo.messaging notifier method matching this
        # notification's priority (e.g. notifier.info / notifier.error).
        notifier = rpc.get_versioned_notifier(publisher_id)
        notify = getattr(notifier, self.priority)
        notify(context, event_type=event_type, payload=payload)

    def emit(self, context):
        """Send the notification."""
        # Payload must have been populated via populate_schema() first.
        assert self.payload.populated

        # notification payload will be a newly populated object
        # therefore every field of it will look changed so this does not carry
        # any extra information so we drop this from the payload.
        self.payload.obj_reset_changes(recursive=False)

        self._emit(context,
                   event_type=self.event_type.
                   to_notification_event_type_field(),
                   publisher_id='%s:%s' % (self.publisher.binary,
                                           self.publisher.host),
                   payload=self.payload.obj_to_primitive())


def notification_sample(sample):
    """Class decorator for documentation generation purposes.

    This is to attach the notification sample information to the notification
    object for documentation generation purposes.

    :param sample: the path of the sample json file relative to the
                   doc/notification_samples/ directory in the masakari
                   repository root.
    """
    def wrap(cls):
        # Accumulate samples so a class can carry several decorators.
        if not getattr(cls, 'samples', None):
            cls.samples = [sample]
        else:
            cls.samples.append(sample)
        return cls
    return wrap
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import inspect from masakari.notifications.objects import base from masakari.objects import base as masakari_base from masakari.objects import fields @masakari_base.MasakariObjectRegistry.register_notification class ExceptionPayload(base.NotificationPayloadBase): # Version 1.0: Initial version VERSION = '1.0' fields = { 'module_name': fields.StringField(), 'function_name': fields.StringField(), 'exception': fields.StringField(), 'exception_message': fields.StringField(), 'traceback': fields.StringField() } @classmethod def from_exc_and_traceback(cls, fault, traceback): trace = inspect.trace() # FIXME(mgoddard): In some code paths we reach this point without being # inside an exception handler. This results in inspect.trace() # returning an empty list. Ideally we should only end up here from an # exception handler. 
if trace: trace = trace[-1] # TODO(gibi): apply strutils.mask_password on exception_message and # consider emitting the exception_message only if the safe flag is # true in the exception like in the REST API module = inspect.getmodule(trace[0]) function_name = trace[3] else: module = None function_name = 'unknown' module_name = module.__name__ if module else 'unknown' return cls( function_name=function_name, module_name=module_name, exception=fault.__class__.__name__, exception_message=str(fault), traceback=traceback) @base.notification_sample('error-exception.json') @masakari_base.MasakariObjectRegistry.register_notification class ExceptionNotification(base.NotificationBase): # Version 1.0: Initial version VERSION = '1.0' fields = { 'payload': fields.ObjectField('ExceptionPayload') } ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/notifications/objects/notification.py0000664000175100017510000001564115033036143024567 0ustar00mylesmyles# Copyright (c) 2018 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
from masakari.notifications.objects import base
from masakari.objects import base as masakari_base
from masakari.objects import fields


@masakari_base.MasakariObjectRegistry.register_notification
class SegmentApiPayloadBase(base.NotificationPayloadBase):
    """Payload mirroring a FailoverSegment object's fields."""
    SCHEMA = {
        'id': ('segment', 'id'),
        'uuid': ('segment', 'uuid'),
        'name': ('segment', 'name'),
        'service_type': ('segment', 'service_type'),
        'description': ('segment', 'description'),
        'recovery_method': ('segment', 'recovery_method'),
        'enabled': ('segment', 'enabled'),
    }
    # Version 1.0: Initial version
    # Version 1.1: Add 'enabled' field
    VERSION = '1.1'
    fields = {
        'id': fields.IntegerField(),
        'uuid': fields.UUIDField(),
        'name': fields.StringField(),
        'service_type': fields.StringField(),
        'description': fields.StringField(nullable=True),
        'recovery_method': fields.FailoverSegmentRecoveryMethodField(),
        'enabled': fields.BooleanField(),
    }

    def __init__(self, segment, **kwargs):
        super(SegmentApiPayloadBase, self).__init__(**kwargs)
        # Copy the SCHEMA-declared fields from the segment object.
        self.populate_schema(segment=segment)


@masakari_base.MasakariObjectRegistry.register_notification
class SegmentApiPayload(SegmentApiPayloadBase):
    """Segment payload plus an optional fault for error notifications."""
    # No SCHEMA as all the additional fields are calculated
    VERSION = '1.1'
    fields = {
        'fault': fields.ObjectField('ExceptionPayload', nullable=True),
    }

    def __init__(self, segment, fault, **kwargs):
        super(SegmentApiPayload, self).__init__(
            segment=segment,
            fault=fault,
            **kwargs)


@masakari_base.MasakariObjectRegistry.register_notification
class HostApiPayloadBase(base.NotificationPayloadBase):
    """Payload mirroring a Host object's fields."""
    SCHEMA = {
        'id': ('host', 'id'),
        'uuid': ('host', 'uuid'),
        'name': ('host', 'name'),
        'failover_segment': ('host', 'failover_segment'),
        'type': ('host', 'type'),
        'reserved': ('host', 'reserved'),
        'control_attributes': ('host', 'control_attributes'),
        'on_maintenance': ('host', 'on_maintenance'),
    }
    # Version 1.0: Initial version
    # Version 1.1: Removed 'failover_segment_id' parameter
    VERSION = '1.1'
    fields = {
        'id': fields.IntegerField(),
        'uuid': fields.UUIDField(),
        'name': fields.StringField(),
        'failover_segment': fields.ObjectField('FailoverSegment'),
        'type': fields.StringField(),
        'reserved': fields.BooleanField(),
        'control_attributes': fields.StringField(),
        'on_maintenance': fields.BooleanField(),
    }

    def __init__(self, host, **kwargs):
        super(HostApiPayloadBase, self).__init__(**kwargs)
        self.populate_schema(host=host)


@masakari_base.MasakariObjectRegistry.register_notification
class HostApiPayload(HostApiPayloadBase):
    """Host payload plus an optional fault for error notifications."""
    # No SCHEMA as all the additional fields are calculated
    # NOTE(review): the base class is at 1.1 but this subclass stays at
    # 1.0, unlike SegmentApiPayload which tracks its base — confirm
    # whether this is intentional.
    VERSION = '1.0'
    fields = {
        'fault': fields.ObjectField('ExceptionPayload', nullable=True),
    }

    def __init__(self, host, fault, **kwargs):
        super(HostApiPayload, self).__init__(
            host=host,
            fault=fault,
            **kwargs)


@masakari_base.MasakariObjectRegistry.register_notification
class NotificationApiPayloadBase(base.NotificationPayloadBase):
    """Payload mirroring a Notification object's fields."""
    SCHEMA = {
        'id': ('notification', 'id'),
        'notification_uuid': ('notification', 'notification_uuid'),
        'generated_time': ('notification', 'generated_time'),
        'source_host_uuid': ('notification', 'source_host_uuid'),
        'type': ('notification', 'type'),
        'payload': ('notification', 'payload'),
        'status': ('notification', 'status'),
    }
    # Version 1.0: Initial version
    VERSION = '1.0'
    fields = {
        'id': fields.IntegerField(),
        'notification_uuid': fields.UUIDField(),
        'generated_time': fields.DateTimeField(),
        'source_host_uuid': fields.UUIDField(),
        'type': fields.NotificationTypeField(),
        'payload': fields.DictOfStringsField(),
        'status': fields.NotificationStatusField(),
    }

    def __init__(self, notification, **kwargs):
        super(NotificationApiPayloadBase, self).__init__(**kwargs)
        self.populate_schema(notification=notification)


@masakari_base.MasakariObjectRegistry.register_notification
class NotificationApiPayload(NotificationApiPayloadBase):
    """Notification payload plus an optional fault for error events."""
    # No SCHEMA as all the additional fields are calculated
    VERSION = '1.0'
    fields = {
        'fault': fields.ObjectField('ExceptionPayload', nullable=True),
    }

    def __init__(self, notification, fault, **kwargs):
        super(NotificationApiPayload, self).__init__(
            notification=notification,
            fault=fault,
            **kwargs)


@base.notification_sample('create-segment-start.json')
@base.notification_sample('create-segment-end.json')
@base.notification_sample('update-segment-start.json')
@base.notification_sample('update-segment-end.json')
@base.notification_sample('delete-segment-start.json')
@base.notification_sample('delete-segment-end.json')
@masakari_base.MasakariObjectRegistry.register_notification
class SegmentApiNotification(base.NotificationBase):
    """Notification for segment create/update/delete API operations."""
    # Version 1.0: Initial version
    VERSION = '1.0'
    fields = {
        'payload': fields.ObjectField('SegmentApiPayload')
    }


@base.notification_sample('create-host-start.json')
@base.notification_sample('create-host-end.json')
@base.notification_sample('update-host-start.json')
@base.notification_sample('update-host-end.json')
@base.notification_sample('delete-host-start.json')
@base.notification_sample('delete-host-end.json')
@masakari_base.MasakariObjectRegistry.register_notification
class HostApiNotification(base.NotificationBase):
    """Notification for host create/update/delete API operations."""
    # Version 1.0: Initial version
    VERSION = '1.0'
    fields = {
        'payload': fields.ObjectField('HostApiPayload')
    }


@base.notification_sample('create-notification-start.json')
@base.notification_sample('create-notification-end.json')
@base.notification_sample('process-notification-start.json')
@base.notification_sample('process-notification-end.json')
@base.notification_sample('process-notification-error.json')
@masakari_base.MasakariObjectRegistry.register_notification
class NotificationApiNotification(base.NotificationBase):
    """Notification for notification create/process API operations."""
    # Version 1.0: Initial version
    VERSION = '1.0'
    fields = {
        'payload': fields.ObjectField('NotificationApiPayload')
    }
def register_all():
    """Import every object module so its classes self-register.

    NOTE(Dinesh_Bhor): You must make sure your object gets imported in
    this function in order for it to be registered by services that may
    need to receive it via RPC.
    """
    for module in ('masakari.objects.host',
                   'masakari.objects.notification',
                   'masakari.objects.segment',
                   'masakari.objects.vmove'):
        __import__(module)
"""Masakari common internal object model""" import datetime from oslo_utils import versionutils from oslo_versionedobjects import base as ovoo_base from oslo_versionedobjects import fields as obj_fields from masakari import objects def get_attrname(name): """Return the mangled name of the attribute's underlying storage.""" return '_obj_' + name class MasakariObjectRegistry(ovoo_base.VersionedObjectRegistry): notification_classes = [] def registration_hook(self, cls, index): # NOTE(Dinesh_Bhor): This is called when an object is registered, # and is responsible for maintaining masakari.objects.$OBJECT # as the highest-versioned implementation of a given object. version = versionutils.convert_version_to_tuple(cls.VERSION) if not hasattr(objects, cls.obj_name()): setattr(objects, cls.obj_name(), cls) else: cur_version = versionutils.convert_version_to_tuple( getattr(objects, cls.obj_name()).VERSION) if version >= cur_version: setattr(objects, cls.obj_name(), cls) @classmethod def register_notification(cls, notification_cls): """Register a class as notification. Use only to register concrete notification or payload classes, do not register base classes intended for inheritance only. """ cls.register_if(False)(notification_cls) cls.notification_classes.append(notification_cls) return notification_cls @classmethod def register_notification_objects(cls): """Register previously decorated notification as normal ovos. This is not intended for production use but only for testing and document generation purposes. """ for notification_cls in cls.notification_classes: cls.register(notification_cls) remotable_classmethod = ovoo_base.remotable_classmethod remotable = ovoo_base.remotable class MasakariObject(ovoo_base.VersionedObject): """Base class and object factory. This forms the base of all objects that can be remoted or instantiated via RPC. Simply defining a class that inherits from this base class will make it remotely instantiatable. 
Objects should implement the necessary "get" classmethod routines as well as "save" object methods as appropriate. """ OBJ_SERIAL_NAMESPACE = 'masakari_object' OBJ_PROJECT_NAMESPACE = 'masakari' def masakari_obj_get_changes(self): """Returns a dict of changed fields with tz unaware datetimes. Any timezone aware datetime field will be converted to UTC timezone and returned as timezone unaware datetime. This will allow us to pass these fields directly to a db update method as they can't have timezone information. """ # Get dirtied/changed fields changes = self.obj_get_changes() # Look for datetime objects that contain timezone information for k, v in changes.items(): if isinstance(v, datetime.datetime) and v.tzinfo: # Remove timezone information and adjust the time according to # the timezone information's offset. changes[k] = v.replace(tzinfo=None) - v.utcoffset() # Return modified dict return changes def obj_reset_changes(self, fields=None, recursive=False): """Reset the list of fields that have been changed. .. note:: - This is NOT "revert to previous values" - Specifying fields on recursive resets will only be honored at the top level. Everything below the top will reset all. :param fields: List of fields to reset, or "all" if None. :param recursive: Call obj_reset_changes(recursive=True) on any sub-objects within the list of fields being reset. 
""" if recursive: for field in self.obj_get_changes(): # Ignore fields not in requested set (if applicable) if fields and field not in fields: continue # Skip any fields that are unset if not self.obj_attr_is_set(field): continue value = getattr(self, field) # Don't reset nulled fields if value is None: continue # Reset straight Object and ListOfObjects fields if isinstance(self.fields[field], obj_fields.ObjectField): value.obj_reset_changes(recursive=True) elif isinstance(self.fields[field], obj_fields.ListOfObjectsField): for thing in value: thing.obj_reset_changes(recursive=True) if fields: self._changed_fields -= set(fields) else: self._changed_fields.clear() class MasakariObjectDictCompat(ovoo_base.VersionedObjectDictCompat): def __iter__(self): for name in self.obj_fields: if (self.obj_attr_is_set(name) or name in self.obj_extra_fields): yield name def keys(self): return list(self) class MasakariTimestampObject(object): """Mixin class for db backed objects with timestamp fields. Sqlalchemy models that inherit from the oslo_db TimestampMixin will include these fields and the corresponding objects will benefit from this mixin. """ fields = { 'created_at': obj_fields.DateTimeField(nullable=True), 'updated_at': obj_fields.DateTimeField(nullable=True), } class MasakariPersistentObject(object): """Mixin class for Persistent objects. This adds the fields that we use in common for most persistent objects. 
""" fields = { 'created_at': obj_fields.DateTimeField(nullable=True), 'updated_at': obj_fields.DateTimeField(nullable=True), 'deleted_at': obj_fields.DateTimeField(nullable=True), 'deleted': obj_fields.BooleanField(default=False), } class ObjectListBase(ovoo_base.ObjectListBase): @classmethod def _obj_primitive_key(cls, field): return 'masakari_object.%s' % field @classmethod def _obj_primitive_field(cls, primitive, field, default=obj_fields.UnspecifiedDefault): key = cls._obj_primitive_key(field) if default == obj_fields.UnspecifiedDefault: return primitive[key] else: return primitive.get(key, default) class MasakariObjectSerializer(ovoo_base.VersionedObjectSerializer): """A Masakari Object Serializer. This implements the Oslo Serializer interface and provides the ability to serialize and deserialize MasakariObject entities. Any service that needs to accept or return MasakariObjects as arguments or result values should pass this to its RPCClient and RPCServer objects. """ OBJ_BASE_CLASS = MasakariObject def __init__(self): super(MasakariObjectSerializer, self).__init__() def obj_make_list(context, list_obj, item_cls, db_list, **extra_args): """Construct an object list from a list of primitives. This calls item_cls._from_db_object() on each item of db_list, and adds the resulting object to list_obj. :param:context: Request context :param:list_obj: An ObjectListBase object :param:item_cls: The MasakariObject class of the objects within the list :param:db_list: The list of primitives to convert to objects :param:extra_args: Extra arguments to pass to _from_db_object() :returns: list_obj """ list_obj.objects = [] for db_item in db_list: item = item_cls._from_db_object(context, item_cls(), db_item, **extra_args) list_obj.objects.append(item) list_obj._context = context list_obj.obj_reset_changes() return list_obj def obj_equal_prims(obj_1, obj_2, ignore=None): """Compare two primitives for equivalence ignoring some keys. 
This operation tests the primitives of two objects for equivalence. Object primitives may contain a list identifying fields that have been changed - this is ignored in the comparison. The ignore parameter lists any other keys to be ignored. :param:obj1: The first object in the comparison :param:obj2: The second object in the comparison :param:ignore: A list of fields to ignore :returns: True if the primitives are equal ignoring changes and specified fields, otherwise False. """ def _strip(prim, keys): if isinstance(prim, dict): for k in keys: prim.pop(k, None) for v in prim.values(): _strip(v, keys) if isinstance(prim, list): for v in prim: _strip(v, keys) return prim if ignore is not None: keys = ['masakari_object.changes'] + ignore else: keys = ['masakari_object.changes'] prim_1 = _strip(obj_1.obj_to_primitive(), keys) prim_2 = _strip(obj_2.obj_to_primitive(), keys) return prim_1 == prim_2 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/objects/fields.py0000664000175100017510000002120615033036143020470 0ustar00mylesmyles# Copyright 2016 NTT Data. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from oslo_versionedobjects import fields


# Import fields from oslo.versionedobjects
BooleanField = fields.BooleanField
IntegerField = fields.IntegerField
StringField = fields.StringField
EnumField = fields.EnumField
UUIDField = fields.UUIDField
DateTimeField = fields.DateTimeField
DictOfStringsField = fields.DictOfStringsField
ObjectField = fields.ObjectField
BaseEnumField = fields.BaseEnumField
ListOfObjectsField = fields.ListOfObjectsField
ListOfStringsField = fields.ListOfStringsField
FloatField = fields.FloatField
ListOfDictOfNullableStringsField = fields.ListOfDictOfNullableStringsField
Field = fields.Field
Enum = fields.Enum
FieldType = fields.FieldType


class BaseMasakariEnum(Enum):
    """Enum whose valid values come from the subclass's ALL tuple.

    Centralizes the boilerplate every masakari enum previously repeated:
    the valid_values-from-ALL constructor plus the index()/from_index()
    helpers that map between a value and its position in ALL.
    """

    def __init__(self, **kwargs):
        super(BaseMasakariEnum, self).__init__(
            valid_values=self.__class__.ALL)

    @classmethod
    def index(cls, value):
        """Return an index into the Enum given a value."""
        return cls.ALL.index(value)

    @classmethod
    def from_index(cls, index):
        """Return the Enum value at a given index."""
        return cls.ALL[index]


class FailoverSegmentRecoveryMethod(BaseMasakariEnum):
    """Represents possible recovery_methods for failover segment."""

    AUTO = "auto"
    RESERVED_HOST = "reserved_host"
    AUTO_PRIORITY = "auto_priority"
    RH_PRIORITY = "rh_priority"

    ALL = (AUTO, RESERVED_HOST, AUTO_PRIORITY, RH_PRIORITY)


class NotificationType(BaseMasakariEnum):
    """Represents possible notification types."""

    COMPUTE_HOST = "COMPUTE_HOST"
    VM = "VM"
    PROCESS = "PROCESS"

    ALL = (COMPUTE_HOST, VM, PROCESS)


class EventType(BaseMasakariEnum):
    """Represents possible event types."""

    STARTED = "STARTED"
    STOPPED = "STOPPED"

    ALL = (STARTED, STOPPED)


class HostStatusType(BaseMasakariEnum):
    """Represents possible event types for Host status."""

    NORMAL = "NORMAL"
    UNKNOWN = "UNKNOWN"

    ALL = (NORMAL, UNKNOWN)


class ClusterStatusType(BaseMasakariEnum):
    """Represents possible event types for Cluster status."""

    ONLINE = "ONLINE"
    OFFLINE = "OFFLINE"

    ALL = (ONLINE, OFFLINE)


class NotificationStatus(BaseMasakariEnum):
    """Represents possible statuses for notifications."""

    NEW = "new"
    RUNNING = "running"
    ERROR = "error"
    FAILED = "failed"
    IGNORED = "ignored"
    FINISHED = "finished"

    ALL = (NEW, RUNNING, ERROR, FAILED, IGNORED, FINISHED)


class EventNotificationAction(BaseMasakariEnum):
    """Actions emitted in API event notifications."""

    # Actions of segments
    SEGMENT_CREATE = 'segment.create'
    SEGMENT_UPDATE = 'segment.update'
    SEGMENT_DELETE = 'segment.delete'

    # Actions of hosts
    HOST_CREATE = 'host.create'
    HOST_UPDATE = 'host.update'
    HOST_DELETE = 'host.delete'

    # Actions of notifications
    NOTIFICATION_CREATE = 'notification.create'
    NOTIFICATION_PROCESS = 'notification.process'

    ALL = (SEGMENT_CREATE, SEGMENT_UPDATE, SEGMENT_DELETE,
           HOST_CREATE, HOST_UPDATE, HOST_DELETE,
           NOTIFICATION_CREATE, NOTIFICATION_PROCESS)


class EventNotificationPriority(BaseMasakariEnum):
    """Priorities mapped onto oslo.messaging notifier methods."""

    AUDIT = 'audit'
    CRITICAL = 'critical'
    DEBUG = 'debug'
    INFO = 'info'
    ERROR = 'error'
    SAMPLE = 'sample'
    WARN = 'warn'

    ALL = (AUDIT, CRITICAL, DEBUG, INFO, ERROR, SAMPLE, WARN)


class EventNotificationPhase(BaseMasakariEnum):
    """Phases of an API event notification (start/end/error)."""

    START = 'start'
    END = 'end'
    ERROR = 'error'

    ALL = (START, END, ERROR)


class FailoverSegmentRecoveryMethodField(BaseEnumField):
    AUTO_TYPE = FailoverSegmentRecoveryMethod()


class NotificationTypeField(BaseEnumField):
    AUTO_TYPE = NotificationType()


class NotificationStatusField(BaseEnumField):
    AUTO_TYPE = NotificationStatus()


class EventNotificationActionField(BaseEnumField):
    AUTO_TYPE = EventNotificationAction()


class EventNotificationPriorityField(BaseEnumField):
    AUTO_TYPE = EventNotificationPriority()


class EventNotificationPhaseField(BaseEnumField):
    AUTO_TYPE = EventNotificationPhase()


class VMoveType(BaseMasakariEnum):
    """Represents possible types for VMoves."""

    EVACUATION = "evacuation"
    MIGRATION = "migration"
    LIVE_MIGRATION = "live_migration"

    ALL = (EVACUATION, MIGRATION, LIVE_MIGRATION)


class VMoveTypeField(BaseEnumField):
    AUTO_TYPE = VMoveType()


class VMoveStatus(BaseMasakariEnum):
    """Represents possible statuses for VMoves."""

    PENDING = "pending"
    ONGOING = "ongoing"
    IGNORED = "ignored"
    FAILED = "failed"
    SUCCEEDED = "succeeded"

    ALL = (PENDING, ONGOING, IGNORED, FAILED, SUCCEEDED)


class VMoveStatusField(BaseEnumField):
    AUTO_TYPE = VMoveStatus()
from oslo_log import log as logging from oslo_utils import uuidutils from oslo_utils import versionutils from masakari.api import utils as api_utils from masakari import db from masakari import exception from masakari import objects from masakari.objects import base from masakari.objects import fields LOG = logging.getLogger(__name__) @base.MasakariObjectRegistry.register class Host(base.MasakariPersistentObject, base.MasakariObject, base.MasakariObjectDictCompat): # Version 1.0: Initial version # Version 1.1: Added 'segment_uuid' parameter to 'get_by_uuid' method # Version 1.2: Removed 'failover_segment_id' parameter which can be # retrieved from failover_segment object VERSION = '1.2' fields = { 'id': fields.IntegerField(), 'uuid': fields.UUIDField(), 'name': fields.StringField(), 'failover_segment': fields.ObjectField('FailoverSegment'), 'type': fields.StringField(), 'reserved': fields.BooleanField(), 'control_attributes': fields.StringField(), 'on_maintenance': fields.BooleanField(), } def obj_make_compatible(self, primitive, target_version): super(Host, self).obj_make_compatible(primitive, target_version) target_version = versionutils.convert_version_to_tuple(target_version) if target_version >= (1, 2) and 'failover_segment_id' in primitive: del primitive['failover_segment_id'] @staticmethod def _from_db_object(context, host, db_host): for key in host.fields: db_value = db_host.get(key) if key == "failover_segment": db_value = objects.FailoverSegment._from_db_object( context, objects.FailoverSegment(), db_value) setattr(host, key, db_value) host.obj_reset_changes() host._context = context return host @base.remotable_classmethod def get_by_id(cls, context, id): db_inst = db.host_get_by_id(context, id) return cls._from_db_object(context, cls(), db_inst) @base.remotable_classmethod def get_by_uuid(cls, context, uuid, segment_uuid=None): db_inst = db.host_get_by_uuid(context, uuid, segment_uuid=segment_uuid) return cls._from_db_object(context, cls(), db_inst) 
@base.remotable_classmethod def get_by_name(cls, context, name): db_inst = db.host_get_by_name(context, name) return cls._from_db_object(context, cls(), db_inst) @base.remotable def create(self): if self.obj_attr_is_set('id'): raise exception.ObjectActionError(action='create', reason='already created') updates = self.masakari_obj_get_changes() if 'uuid' not in updates: updates['uuid'] = uuidutils.generate_uuid() LOG.debug('Generated uuid %(uuid)s for host', dict(uuid=updates['uuid'])) if 'failover_segment' not in updates: raise exception.ObjectActionError(action='create', reason='failover segment ' 'not assigned') segment = updates.pop('failover_segment') updates['failover_segment_id'] = segment.uuid api_utils.notify_about_host_api(self._context, self, action=fields.EventNotificationAction.HOST_CREATE, phase=fields.EventNotificationPhase.START) db_host = db.host_create(self._context, updates) api_utils.notify_about_host_api(self._context, self, action=fields.EventNotificationAction.HOST_CREATE, phase=fields.EventNotificationPhase.END) self._from_db_object(self._context, self, db_host) @base.remotable def save(self): updates = self.masakari_obj_get_changes() if 'failover_segment' in updates: raise exception.ObjectActionError(action='save', reason='failover segment ' 'changed') updates.pop('id', None) api_utils.notify_about_host_api(self._context, self, action=fields.EventNotificationAction.HOST_UPDATE, phase=fields.EventNotificationPhase.START) db_host = db.host_update(self._context, self.uuid, updates) api_utils.notify_about_host_api(self._context, self, action=fields.EventNotificationAction.HOST_UPDATE, phase=fields.EventNotificationPhase.END) self._from_db_object(self._context, self, db_host) @base.remotable def destroy(self): if not self.obj_attr_is_set('id'): raise exception.ObjectActionError(action='destroy', reason='already destroyed') if not self.obj_attr_is_set('uuid'): raise exception.ObjectActionError(action='destroy', reason='no uuid') 
api_utils.notify_about_host_api(self._context, self, action=fields.EventNotificationAction.HOST_DELETE, phase=fields.EventNotificationPhase.START) db.host_delete(self._context, self.uuid) api_utils.notify_about_host_api(self._context, self, action=fields.EventNotificationAction.HOST_DELETE, phase=fields.EventNotificationPhase.END) delattr(self, base.get_attrname('id')) @base.MasakariObjectRegistry.register class HostList(base.ObjectListBase, base.MasakariObject): VERSION = '1.0' fields = { 'objects': fields.ListOfObjectsField('Host'), } @base.remotable_classmethod def get_all(cls, context, filters=None, sort_keys=None, sort_dirs=None, limit=None, marker=None): groups = db.host_get_all_by_filters(context, filters=filters, sort_keys=sort_keys, sort_dirs=sort_dirs, limit=limit, marker=marker) return base.obj_make_list(context, cls(context), objects.Host, groups) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/objects/notification.py0000664000175100017510000001643515033036143021720 0ustar00mylesmyles# Copyright 2016 NTT Data. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from oslo_log import log as logging from oslo_serialization import jsonutils from oslo_utils import uuidutils from masakari.api import utils as api_utils from masakari import db from masakari import exception from masakari import objects from masakari.objects import base from masakari.objects import fields LOG = logging.getLogger(__name__) NOTIFICATION_OPTIONAL_FIELDS = ['recovery_workflow_details'] @base.MasakariObjectRegistry.register class Notification(base.MasakariPersistentObject, base.MasakariObject, base.MasakariObjectDictCompat): # Version 1.0: Initial version # Version 1.1: Added recovery_workflow_details field. # Note: This field shouldn't be persisted. # Version 1.2: Added failover_segment_uuid and message field. VERSION = '1.2' fields = { 'id': fields.IntegerField(), 'notification_uuid': fields.UUIDField(), 'generated_time': fields.DateTimeField(), 'source_host_uuid': fields.UUIDField(), 'type': fields.NotificationTypeField(), 'payload': fields.DictOfStringsField(), 'status': fields.NotificationStatusField(), # NOTE(ShilpaSD): This field shouldn't be stored in db. # The recovery workflow details read from the 'notification_driver' # will be set to this field. 
'recovery_workflow_details': fields.ListOfObjectsField( 'NotificationProgressDetails', default=[]), 'failover_segment_uuid': fields.UUIDField(), 'message': fields.StringField(nullable=True), } @staticmethod def _from_db_object(context, notification, db_notification): for key in notification.fields: if key in NOTIFICATION_OPTIONAL_FIELDS: continue if key != 'payload': setattr(notification, key, db_notification.get(key)) else: payload = db_notification.get("payload") notification.payload = jsonutils.loads(payload) notification.obj_reset_changes() notification._context = context return notification @base.remotable_classmethod def get_by_id(cls, context, id): db_notification = db.notification_get_by_id(context, id) return cls._from_db_object(context, cls(), db_notification) @base.remotable_classmethod def get_by_uuid(cls, context, uuid): db_notification = db.notification_get_by_uuid(context, uuid) return cls._from_db_object(context, cls(), db_notification) @base.remotable def create(self): if self.obj_attr_is_set('id'): raise exception.ObjectActionError(action='create', reason='already created') updates = self.masakari_obj_get_changes() # NOTE(ShilpaSD): This field doesn't exist in the Notification # db model so don't save it. 
updates.pop('recovery_workflow_details', None) if 'notification_uuid' not in updates: updates['notification_uuid'] = uuidutils.generate_uuid() LOG.debug('Generated uuid %(uuid)s for notifications', dict(uuid=updates['notification_uuid'])) if 'payload' in updates: updates['payload'] = jsonutils.dumps(updates['payload']) api_utils.notify_about_notification_api(self._context, self, action=fields.EventNotificationAction.NOTIFICATION_CREATE, phase=fields.EventNotificationPhase.START) db_notification = db.notification_create(self._context, updates) api_utils.notify_about_notification_api(self._context, self, action=fields.EventNotificationAction.NOTIFICATION_CREATE, phase=fields.EventNotificationPhase.END) self._from_db_object(self._context, self, db_notification) @base.remotable def save(self): updates = self.masakari_obj_get_changes() updates.pop('id', None) # NOTE(ShilpaSD): This field doesn't exist in the Notification # db model so don't save it. updates.pop('recovery_workflow_details', None) db_notification = db.notification_update(self._context, self.notification_uuid, updates) self._from_db_object(self._context, self, db_notification) @base.remotable def destroy(self): if not self.obj_attr_is_set('id'): raise exception.ObjectActionError(action='destroy', reason='already destroyed') if not self.obj_attr_is_set('notification_uuid'): raise exception.ObjectActionError(action='destroy', reason='no uuid') db.notification_delete(self._context, self.notification_uuid) delattr(self, base.get_attrname('id')) @base.MasakariObjectRegistry.register class NotificationList(base.ObjectListBase, base.MasakariObject): VERSION = '1.0' fields = { 'objects': fields.ListOfObjectsField('Notification'), } @base.remotable_classmethod def get_all(cls, context, filters=None, sort_keys=None, sort_dirs=None, limit=None, marker=None): groups = db.notifications_get_all_by_filters(context, filters=filters, sort_keys=sort_keys, sort_dirs=sort_dirs, limit=limit, marker=marker ) return 
base.obj_make_list(context, cls(context), objects.Notification, groups) def notification_sample(sample): """Class decorator to attach the notification sample information to the notification object for documentation generation purposes. :param sample: the path of the sample json file relative to the doc/notification_samples/ directory in the nova repository root. """ def wrap(cls): cls.sample = sample return cls return wrap @base.MasakariObjectRegistry.register class NotificationProgressDetails(base.MasakariObject, base.MasakariObjectDictCompat): VERSION = '1.0' fields = { 'name': fields.StringField(), 'progress': fields.FloatField(), 'progress_details': fields.ListOfDictOfNullableStringsField( default=[]), 'state': fields.StringField() } @classmethod def create(cls, name, progress, progress_details, state,): return cls(name=name, progress=progress, progress_details=progress_details, state=state) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/objects/segment.py0000664000175100017510000001437715033036143020677 0ustar00mylesmyles# Copyright 2016 NTT Data. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from oslo_log import log as logging from oslo_utils import uuidutils from oslo_utils import versionutils from masakari.api import utils as api_utils from masakari import db from masakari import exception from masakari import objects from masakari.objects import base from masakari.objects import fields LOG = logging.getLogger(__name__) @base.MasakariObjectRegistry.register class FailoverSegment(base.MasakariPersistentObject, base.MasakariObject, base.MasakariObjectDictCompat): # 1.0, init # 1.1, add enabled field VERSION = '1.1' fields = { 'id': fields.IntegerField(), 'uuid': fields.UUIDField(), 'name': fields.StringField(), 'service_type': fields.StringField(), 'enabled': fields.BooleanField(default=True), 'description': fields.StringField(nullable=True), 'recovery_method': fields.FailoverSegmentRecoveryMethodField(), } def obj_make_compatible(self, primitive, target_version): super(FailoverSegment, self).obj_make_compatible(primitive, target_version) target_version = versionutils.convert_version_to_tuple(target_version) if target_version < (1, 1) and 'enabled' in primitive: del primitive['enabled'] @staticmethod def _from_db_object(context, segment, db_segment): for key in segment.fields: setattr(segment, key, db_segment[key]) segment._context = context segment.obj_reset_changes() return segment @base.remotable_classmethod def get_by_id(cls, context, id): db_inst = db.failover_segment_get_by_id(context, id) return cls._from_db_object(context, cls(), db_inst) @base.remotable_classmethod def get_by_uuid(cls, context, uuid): db_inst = db.failover_segment_get_by_uuid(context, uuid) return cls._from_db_object(context, cls(), db_inst) @base.remotable_classmethod def get_by_name(cls, context, name): db_inst = db.failover_segment_get_by_name(context, name) return cls._from_db_object(context, cls(), db_inst) @base.remotable def create(self): if self.obj_attr_is_set('id'): raise exception.ObjectActionError(action='create', reason='already created') updates = 
self.masakari_obj_get_changes() if 'uuid' not in updates: updates['uuid'] = uuidutils.generate_uuid() LOG.debug('Generated uuid %(uuid)s for failover segment', dict(uuid=updates['uuid'])) api_utils.notify_about_segment_api(self._context, self, action=fields.EventNotificationAction.SEGMENT_CREATE, phase=fields.EventNotificationPhase.START) db_segment = db.failover_segment_create(self._context, updates) api_utils.notify_about_segment_api(self._context, self, action=fields.EventNotificationAction.SEGMENT_CREATE, phase=fields.EventNotificationPhase.END) self._from_db_object(self._context, self, db_segment) @base.remotable def save(self): updates = self.masakari_obj_get_changes() updates.pop('id', None) api_utils.notify_about_segment_api(self._context, self, action=fields.EventNotificationAction.SEGMENT_UPDATE, phase=fields.EventNotificationPhase.START) db_segment = db.failover_segment_update(self._context, self.uuid, updates) api_utils.notify_about_segment_api(self._context, self, action=fields.EventNotificationAction.SEGMENT_UPDATE, phase=fields.EventNotificationPhase.END) self._from_db_object(self._context, self, db_segment) @base.remotable def destroy(self): if not self.obj_attr_is_set('id'): raise exception.ObjectActionError(action='destroy', reason='already destroyed') if not self.obj_attr_is_set('uuid'): raise exception.ObjectActionError(action='destroy', reason='no uuid') api_utils.notify_about_segment_api(self._context, self, action=fields.EventNotificationAction.SEGMENT_DELETE, phase=fields.EventNotificationPhase.START) db.failover_segment_delete(self._context, self.uuid) api_utils.notify_about_segment_api(self._context, self, action=fields.EventNotificationAction.SEGMENT_DELETE, phase=fields.EventNotificationPhase.END) delattr(self, base.get_attrname('id')) def is_under_recovery(self, filters=None): return db.is_failover_segment_under_recovery(self._context, self.uuid, filters=filters) @base.MasakariObjectRegistry.register class 
FailoverSegmentList(base.ObjectListBase, base.MasakariObject): VERSION = '1.0' fields = { 'objects': fields.ListOfObjectsField('FailoverSegment'), } @base.remotable_classmethod def get_all(cls, ctxt, filters=None, sort_keys=None, sort_dirs=None, limit=None, marker=None): groups = db.failover_segment_get_all_by_filters(ctxt, filters=filters, sort_keys=sort_keys, sort_dirs=sort_dirs, limit=limit, marker=marker) return base.obj_make_list(ctxt, cls(ctxt), objects.FailoverSegment, groups) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/objects/vmove.py0000664000175100017510000000762315033036143020365 0ustar00mylesmyles# Copyright(c) 2022 Inspur # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
from oslo_log import log as logging from oslo_utils import uuidutils from masakari import db from masakari import exception from masakari import objects from masakari.objects import base from masakari.objects import fields LOG = logging.getLogger(__name__) @base.MasakariObjectRegistry.register class VMove(base.MasakariPersistentObject, base.MasakariObject, base.MasakariObjectDictCompat): VERSION = '1.0' fields = { 'id': fields.IntegerField(), 'uuid': fields.UUIDField(), 'notification_uuid': fields.UUIDField(), 'instance_uuid': fields.UUIDField(), 'instance_name': fields.StringField(), 'source_host': fields.StringField(nullable=True), 'dest_host': fields.StringField(nullable=True), 'start_time': fields.DateTimeField(nullable=True), 'end_time': fields.DateTimeField(nullable=True), 'type': fields.VMoveTypeField(nullable=True), 'status': fields.VMoveStatusField(nullable=True), 'message': fields.StringField(nullable=True), } @staticmethod def _from_db_object(context, vmove, db_vmove): for key in vmove.fields: setattr(vmove, key, db_vmove[key]) vmove._context = context vmove.obj_reset_changes() return vmove @base.remotable_classmethod def get_by_uuid(cls, context, uuid): db_inst = db.vmove_get_by_uuid(context, uuid) return cls._from_db_object(context, cls(), db_inst) @base.remotable def create(self): if self.obj_attr_is_set('id'): raise exception.ObjectActionError(action='create', reason='already created') updates = self.masakari_obj_get_changes() if 'uuid' not in updates: updates['uuid'] = uuidutils.generate_uuid() vmove = db.vmove_create(self._context, updates) self._from_db_object(self._context, self, vmove) @base.remotable def save(self): updates = self.masakari_obj_get_changes() updates.pop('id', None) vmove = db.vmove_update(self._context, self.uuid, updates) self._from_db_object(self._context, self, vmove) @base.MasakariObjectRegistry.register class VMoveList(base.ObjectListBase, base.MasakariObject): VERSION = '1.0' fields = { 'objects': 
fields.ListOfObjectsField('VMove'), } @base.remotable_classmethod def get_all(cls, ctxt, filters=None, sort_keys=None, sort_dirs=None, limit=None, marker=None): groups = db.vmoves_get_all_by_filters(ctxt, filters=filters, sort_keys=sort_keys, sort_dirs=sort_dirs, limit=limit, marker=marker) return base.obj_make_list(ctxt, cls(ctxt), objects.VMove, groups) @base.remotable_classmethod def get_all_vmoves(cls, ctxt, notification_uuid, status=None): filters = { 'notification_uuid': notification_uuid } if status: filters['status'] = status groups = db.vmoves_get_all_by_filters(ctxt, filters=filters) return base.obj_make_list(ctxt, cls(ctxt), objects.VMove, groups) ././@PaxHeader0000000000000000000000000000003200000000000010210 xustar0026 mtime=1751923813.52971 masakari-19.1.0.dev18/masakari/policies/0000775000175100017510000000000015033036146017030 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/policies/__init__.py0000664000175100017510000000225215033036143021137 0ustar00mylesmyles# Copyright (C) 2018 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import itertools from masakari.policies import base from masakari.policies import extension_info from masakari.policies import hosts from masakari.policies import notifications from masakari.policies import segments from masakari.policies import versions from masakari.policies import vmoves def list_rules(): return itertools.chain( base.list_rules(), extension_info.list_rules(), hosts.list_rules(), notifications.list_rules(), segments.list_rules(), versions.list_rules(), vmoves.list_rules() ) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/policies/base.py0000664000175100017510000000235315033036143020314 0ustar00mylesmyles# Copyright (C) 2018 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from oslo_policy import policy MASAKARI_API = 'os_masakari_api' RULE_ADMIN_OR_OWNER = 'rule:admin_or_owner' RULE_ADMIN_API = 'rule:admin_api' RULE_ANY = '@' rules = [ policy.RuleDefault( "context_is_admin", "role:admin", "Decides what is required for the 'is_admin:True' check to succeed."), policy.RuleDefault( "admin_or_owner", "is_admin:True or project_id:%(project_id)s", "Default rule for most non-Admin APIs."), policy.RuleDefault( "admin_api", "is_admin:True", "Default rule for most Admin APIs.") ] def list_rules(): return rules ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/policies/extension_info.py0000664000175100017510000000306215033036143022427 0ustar00mylesmyles# Copyright (C) 2018 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from oslo_policy import policy from masakari.policies import base EXTENSIONS = 'os_masakari_api:extensions:%s' rules = [ policy.DocumentedRuleDefault( name=EXTENSIONS % 'index', check_str=base.RULE_ADMIN_API, description="List available extensions.", operations=[ { 'method': 'GET', 'path': '/extensions' } ]), policy.DocumentedRuleDefault( name=EXTENSIONS % 'detail', check_str=base.RULE_ADMIN_API, description="Shows information for an extension.", operations=[ { 'method': 'GET', 'path': '/extensions/{extensions_id}' } ]), policy.RuleDefault( name=EXTENSIONS % 'discoverable', check_str=base.RULE_ADMIN_API, description="Extension Info API extensions to change the API.", ), ] def list_rules(): return rules ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/policies/hosts.py0000664000175100017510000000506115033036143020541 0ustar00mylesmyles# Copyright (C) 2018 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from oslo_policy import policy from masakari.policies import base HOSTS = 'os_masakari_api:os-hosts:%s' rules = [ policy.DocumentedRuleDefault( name=HOSTS % 'index', check_str=base.RULE_ADMIN_API, description="Lists IDs, names, type, reserved, on_maintenance for all" " hosts.", operations=[ { 'method': 'GET', 'path': '/segments/{segment_id}/hosts' } ]), policy.DocumentedRuleDefault( name=HOSTS % 'detail', check_str=base.RULE_ADMIN_API, description="Shows details for a host.", operations=[ { 'method': 'GET', 'path': '/segments/{segment_id}/hosts/{host_id}' } ]), policy.DocumentedRuleDefault( name=HOSTS % 'create', check_str=base.RULE_ADMIN_API, description="Creates a host under given segment.", operations=[ { 'method': 'POST', 'path': '/segments/{segment_id}/hosts' } ]), policy.DocumentedRuleDefault( name=HOSTS % 'update', check_str=base.RULE_ADMIN_API, description="Updates the editable attributes of an existing host.", operations=[ { 'method': 'PUT', 'path': '/segments/{segment_id}/hosts/{host_id}' } ]), policy.DocumentedRuleDefault( name=HOSTS % 'delete', check_str=base.RULE_ADMIN_API, description="Deletes a host from given segment.", operations=[ { 'method': 'DELETE', 'path': '/segments/{segment_id}/hosts/{host_id}' } ]), policy.RuleDefault( name=HOSTS % 'discoverable', check_str=base.RULE_ADMIN_API, description="Host API extensions to change the API.", ), ] def list_rules(): return rules ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/policies/notifications.py0000664000175100017510000000371515033036143022256 0ustar00mylesmyles# Copyright (C) 2018 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_policy import policy from masakari.policies import base NOTIFICATIONS = 'os_masakari_api:notifications:%s' rules = [ policy.DocumentedRuleDefault( name=NOTIFICATIONS % 'index', check_str=base.RULE_ADMIN_API, description="Lists IDs, notification types, host_name, generated_time," " payload and status for all notifications.", operations=[ { 'method': 'GET', 'path': '/notifications' } ]), policy.DocumentedRuleDefault( name=NOTIFICATIONS % 'detail', check_str=base.RULE_ADMIN_API, description="Shows details for a notification.", operations=[ { 'method': 'GET', 'path': '/notifications/{notification_id}' } ]), policy.DocumentedRuleDefault( name=NOTIFICATIONS % 'create', check_str=base.RULE_ADMIN_API, description="Creates a notification.", operations=[ { 'method': 'POST', 'path': '/notifications' } ]), policy.RuleDefault( name=NOTIFICATIONS % 'discoverable', check_str=base.RULE_ADMIN_API, description="Notification API extensions to change the API.", ), ] def list_rules(): return rules ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/policies/segments.py0000664000175100017510000000474415033036143021235 0ustar00mylesmyles# Copyright (C) 2018 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_policy import policy from masakari.policies import base SEGMENTS = 'os_masakari_api:segments:%s' rules = [ policy.DocumentedRuleDefault( name=SEGMENTS % 'index', check_str=base.RULE_ADMIN_API, description="Lists IDs, names, description, recovery_method, " "service_type for all segments.", operations=[ { 'method': 'GET', 'path': '/segments' } ]), policy.DocumentedRuleDefault( name=SEGMENTS % 'detail', check_str=base.RULE_ADMIN_API, description="Shows details for a segment.", operations=[ { 'method': 'GET', 'path': '/segments/{segment_id}' } ]), policy.DocumentedRuleDefault( name=SEGMENTS % 'create', check_str=base.RULE_ADMIN_API, description="Creates a segment.", operations=[ { 'method': 'POST', 'path': '/segments' } ]), policy.DocumentedRuleDefault( name=SEGMENTS % 'update', check_str=base.RULE_ADMIN_API, description="Updates the editable attributes of an existing host.", operations=[ { 'method': 'PUT', 'path': '/segments/{segment_id}' } ]), policy.DocumentedRuleDefault( name=SEGMENTS % 'delete', check_str=base.RULE_ADMIN_API, description="Deletes a segment.", operations=[ { 'method': 'DELETE', 'path': '/segments/{segment_id}' } ]), policy.RuleDefault( name=SEGMENTS % 'discoverable', check_str=base.RULE_ADMIN_API, description="Segment API extensions to change the API.", ), ] def list_rules(): return rules ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/policies/versions.py0000664000175100017510000000231215033036143021245 0ustar00mylesmyles# Copyright (C) 2018 NTT DATA # All Rights 
Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_policy import policy from masakari.policies import base VERSIONS = 'os_masakari_api:versions:%s' rules = [ policy.DocumentedRuleDefault( name=VERSIONS % 'index', check_str=base.RULE_ANY, description="List all versions.", operations=[ { 'method': 'GET', 'path': '/' } ]), policy.RuleDefault( name=VERSIONS % 'discoverable', check_str=base.RULE_ANY, description="Version API extensions to change the API.", ), ] def list_rules(): return rules ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/policies/vmoves.py0000664000175100017510000000321215033036143020714 0ustar00mylesmyles# Copyright(c) 2022 Inspur # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
from oslo_policy import policy from masakari.policies import base VMOVES = 'os_masakari_api:vmoves:%s' rules = [ policy.DocumentedRuleDefault( name=VMOVES % 'index', check_str=base.RULE_ADMIN_API, description="Lists IDs, notification_id, instance_id, source_host, " "dest_host, status and type for all VM moves.", operations=[ { 'method': 'GET', 'path': '/notifications/{notification_id}/vmoves' } ]), policy.DocumentedRuleDefault( name=VMOVES % 'detail', check_str=base.RULE_ADMIN_API, description="Shows details for one VM move.", operations=[ { 'method': 'GET', 'path': '/notifications/{notification_id}/vmoves/' '{vmove_id}' } ]), policy.RuleDefault( name=VMOVES % 'discoverable', check_str=base.RULE_ADMIN_API, description="VM moves API extensions to change the API.", ), ] def list_rules(): return rules ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/policy.py0000664000175100017510000002026215033036143017071 0ustar00mylesmyles# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
"""Policy Engine For Masakari.""" import copy import logging import re import sys from oslo_config import cfg from oslo_policy import policy from oslo_utils import excutils from masakari import exception # from masakari.i18n import _LE, _LW from masakari import policies CONF = cfg.CONF LOG = logging.getLogger(__name__) _ENFORCER = None # saved_file_rules and used to compare with new rules to determine the # rules whether were updated. saved_file_rules = [] KEY_EXPR = re.compile(r'%\((\w+)\)s') def reset(): global _ENFORCER if _ENFORCER: _ENFORCER.clear() _ENFORCER = None def init(policy_file=None, rules=None, default_rule=None, use_conf=True, suppress_deprecation_warnings=False): """Init an Enforcer class. :param policy_file: Custom policy file to use, if none is specified, `CONF.policy_file` will be used. :param rules: Default dictionary / Rules to use. It will be considered just in the first instantiation. :param default_rule: Default rule to use, CONF.default_rule will be used if none is specified. :param use_conf: Whether to load rules from config file. :param suppress_deprecation_warnings: Whether to suppress the deprecation warnings. """ global _ENFORCER global saved_file_rules if not _ENFORCER: _ENFORCER = policy.Enforcer(CONF, policy_file=policy_file, rules=rules, default_rule=default_rule, use_conf=use_conf) # NOTE(gmann): Explictly disable the warnings for policies # changing their default check_str. During policy-defaults-refresh # work, all the policy defaults have been changed and warning for # each policy started filling the logs limit for various tool. # Once we move to new defaults only world then we can enable these # warning again. _ENFORCER.suppress_default_change_warnings = True if suppress_deprecation_warnings: _ENFORCER.suppress_deprecation_warnings = True register_rules(_ENFORCER) _ENFORCER.load_rules() # Only the rules which are loaded from file may be changed. 
current_file_rules = _ENFORCER.file_rules current_file_rules = _serialize_rules(current_file_rules) # Checks whether the rules are updated in the runtime if saved_file_rules != current_file_rules: _warning_for_deprecated_user_based_rules(current_file_rules) saved_file_rules = copy.deepcopy(current_file_rules) def _serialize_rules(rules): """Serialize all the Rule object as string which is used to compare the rules list. """ result = [(rule_name, str(rule)) for rule_name, rule in rules.items()] return sorted(result, key=lambda rule: rule[0]) def _warning_for_deprecated_user_based_rules(rules): """Warning user based policy enforcement used in the rule but the rule doesn't support it. """ for rule in rules: if 'user_id' in KEY_EXPR.findall(rule[1]): LOG.debug(("The user_id attribute isn't supported in the rule%s'. " "All the user_id based policy enforcement will be " "removed in the future."), rule[0]) def set_rules(rules, overwrite=True, use_conf=False): """Set rules based on the provided dict of rules. :param rules: New rules to use. It should be an instance of dict. :param overwrite: Whether to overwrite current rules or update them with the new rules. :param use_conf: Whether to reload rules from config file. """ init(use_conf=False) _ENFORCER.set_rules(rules, overwrite, use_conf) def authorize(context, action, target, do_raise=True, exc=None): """Verifies that the action is valid on the target in this context. :param context: masakari context :param action: string representing the action to be checked this should be colon separated for clarity. i.e. ``os_masakari_api:segments``, ``os_masakari_api:os-hosts``, ``os_masakari_api:notifications``, ``os_masakari_api:extensions`` :param target: dictionary representing the object of the action for object creation this should be a dictionary representing the location of the object e.g. 
``{'project_id': context.project_id}`` :param do_raise: if True (the default), raises PolicyNotAuthorized; if False, returns False :param exc: Class of the exception to raise if the check fails. Any remaining arguments passed to :meth:`authorize` (both positional and keyword arguments) will be passed to the exception class. If not specified, :class:`PolicyNotAuthorized` will be used. :raises masakari.exception.PolicyNotAuthorized: if verification fails and do_raise is True. Or if 'exc' is specified it will raise an exception of that type. :return: returns a non-False value (not necessarily "True") if authorized, and the exact value False if not authorized and do_raise is False. """ init() credentials = context.to_policy_values() if not exc: exc = exception.PolicyNotAuthorized try: result = _ENFORCER.authorize(action, target, credentials, do_raise=do_raise, exc=exc, action=action) except policy.PolicyNotRegistered: with excutils.save_and_reraise_exception(): LOG.debug('Policy not registered') except Exception: with excutils.save_and_reraise_exception(): LOG.debug('Policy check for %(action)s failed with credentials ' '%(credentials)s', {'action': action, 'credentials': credentials}) return result def check_is_admin(context): """Whether or not roles contains 'admin' role according to policy setting. 
""" init() # the target is user-self credentials = context.to_policy_values() target = credentials return _ENFORCER.authorize('context_is_admin', target, credentials) @policy.register('is_admin') class IsAdminCheck(policy.Check): """An explicit check for is_admin.""" def __init__(self, kind, match): """Initialize the check.""" self.expected = (match.lower() == 'true') super(IsAdminCheck, self).__init__(kind, str(self.expected)) def __call__(self, target, creds, enforcer): """Determine whether is_admin matches the requested value.""" return creds['is_admin'] == self.expected def get_rules(): if _ENFORCER: return _ENFORCER.rules def register_rules(enforcer): enforcer.register_defaults(policies.list_rules()) def get_enforcer(): # This method is for use by oslopolicy CLI scripts. Those scripts need the # 'output-file' and 'namespace' options, but having those in sys.argv means # loading the Masakari config options will fail as those are not expected # to be present. So we pass in an arg list with those stripped out. conf_args = [] # Start at 1 because cfg.CONF expects the equivalent of sys.argv[1:] i = 1 while i < len(sys.argv): if sys.argv[i].strip('-') in ['namespace', 'output-file']: i += 2 continue conf_args.append(sys.argv[i]) i += 1 cfg.CONF(conf_args, project='masakari') init() return _ENFORCER ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/rpc.py0000664000175100017510000001142415033036143016356 0ustar00mylesmyles# Copyright 2016 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations # under the License. import oslo_messaging as messaging from oslo_messaging.rpc import dispatcher from oslo_serialization import jsonutils import masakari.context import masakari.exception from masakari.objects import base __all__ = [ 'init', 'cleanup', 'set_defaults', 'add_extra_exmods', 'clear_extra_exmods', 'get_allowed_exmods', 'RequestContextSerializer', 'get_client', 'get_server', ] CONF = masakari.conf.CONF TRANSPORT = None NOTIFICATION_TRANSPORT = None NOTIFIER = None ALLOWED_EXMODS = [ masakari.exception.__name__, ] EXTRA_EXMODS = [] def init(conf): global TRANSPORT, NOTIFICATION_TRANSPORT, NOTIFIER exmods = get_allowed_exmods() TRANSPORT = create_transport(get_transport_url()) NOTIFICATION_TRANSPORT = messaging.get_notification_transport( conf, allowed_remote_exmods=exmods) serializer = RequestContextSerializer(JsonPayloadSerializer()) NOTIFIER = messaging.Notifier(NOTIFICATION_TRANSPORT, serializer=serializer, topics=['versioned_notifications']) def initialized(): return None not in [TRANSPORT, NOTIFICATION_TRANSPORT, NOTIFIER] def cleanup(): global TRANSPORT, NOTIFICATION_TRANSPORT, NOTIFIER assert TRANSPORT is not None assert NOTIFICATION_TRANSPORT is not None assert NOTIFIER is not None TRANSPORT.cleanup() NOTIFICATION_TRANSPORT.cleanup() TRANSPORT = NOTIFICATION_TRANSPORT = NOTIFIER = None def set_defaults(control_exchange): messaging.set_transport_defaults(control_exchange) def add_extra_exmods(*args): EXTRA_EXMODS.extend(args) def clear_extra_exmods(): del EXTRA_EXMODS[:] def get_allowed_exmods(): return ALLOWED_EXMODS + EXTRA_EXMODS def get_transport_url(url_str=None): return messaging.TransportURL.parse(CONF, url_str) def create_transport(url): exmods = get_allowed_exmods() return messaging.get_rpc_transport(CONF, url=url, allowed_remote_exmods=exmods) class JsonPayloadSerializer(messaging.NoOpSerializer): @staticmethod def serialize_entity(context, entity): return 
jsonutils.to_primitive(entity, convert_instances=True) class RequestContextSerializer(messaging.Serializer): def __init__(self, base): self._base = base def serialize_entity(self, context, entity): if not self._base: return entity return self._base.serialize_entity(context, entity) def deserialize_entity(self, context, entity): if not self._base: return entity return self._base.deserialize_entity(context, entity) def serialize_context(self, context): return context.to_dict() def deserialize_context(self, context): return masakari.context.RequestContext.from_dict(context) def get_client(target, version_cap=None, serializer=None): assert TRANSPORT is not None serializer = RequestContextSerializer(serializer) return messaging.get_rpc_client( TRANSPORT, target, version_cap=version_cap, serializer=serializer) def get_server(target, endpoints, serializer=None): assert TRANSPORT is not None access_policy = dispatcher.DefaultRPCAccessPolicy serializer = RequestContextSerializer(serializer) return messaging.get_rpc_server(TRANSPORT, target, endpoints, executor='eventlet', serializer=serializer, access_policy=access_policy) def get_versioned_notifier(publisher_id): assert NOTIFIER is not None return NOTIFIER.prepare(publisher_id=publisher_id) class RPCAPI(object): """Mixin class aggregating methods related to RPC API compatibility.""" RPC_API_VERSION = '1.0' TOPIC = '' BINARY = '' def __init__(self): target = messaging.Target(topic=self.TOPIC, version=self.RPC_API_VERSION) serializer = base.MasakariObjectSerializer() self.client = get_client(target, serializer=serializer) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/safe_utils.py0000664000175100017510000000256415033036143017735 0ustar00mylesmyles# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Utilities and helper functions that won't produce circular imports.""" def get_wrapped_function(function): """Get the method at the bottom of a stack of decorators.""" if not hasattr(function, '__closure__') or not function.__closure__: return function def _get_wrapped_function(function): if not hasattr(function, '__closure__') or not function.__closure__: return None for closure in function.__closure__: func = closure.cell_contents deeper_func = _get_wrapped_function(func) if deeper_func: return deeper_func elif hasattr(closure.cell_contents, '__call__'): return closure.cell_contents return _get_wrapped_function(function) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/service.py0000664000175100017510000002312515033036143017233 0ustar00mylesmyles# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
"""Generic Node base class for all workers that run on hosts.""" import os import random import sys from oslo_concurrency import processutils from oslo_log import log as logging import oslo_messaging as messaging from oslo_service import service from oslo_utils import importutils from masakari.api import wsgi import masakari.conf from masakari import context from masakari import coordination as masakari_coordination from masakari import exception from masakari.i18n import _ from masakari.objects import base as objects_base from masakari import rpc from masakari import utils from masakari import version LOG = logging.getLogger(__name__) CONF = masakari.conf.CONF class Service(service.Service): """Service object for binaries running on hosts. A service takes a manager and enables rpc by listening to queues based on topic. It also periodically runs tasks on the manager. """ def __init__(self, host, binary, topic, manager, periodic_enable=None, periodic_fuzzy_delay=None, periodic_interval_max=None): super(Service, self).__init__() if not rpc.initialized(): rpc.init(CONF) self.host = host self.binary = binary self.topic = topic self.manager_class_name = manager manager_class = importutils.import_class(self.manager_class_name) self.rpcserver = None self.manager = manager_class(host=self.host) self.periodic_enable = periodic_enable self.periodic_fuzzy_delay = periodic_fuzzy_delay self.periodic_interval_max = periodic_interval_max def __repr__(self): return "<%(cls_name)s: host=%(host)s, binary=%(binary)s, " \ "manager_class_name=%(manager)s>" %\ { 'cls_name': self.__class__.__name__, 'host': self.host, 'binary': self.binary, 'manager': self.manager_class_name } def start(self): verstr = version.version_string_with_package() LOG.info('Starting %(topic)s (version %(version)s)', { 'topic': self.topic, 'version': verstr }) self.basic_config_check() LOG.debug("Creating RPC server for service %s", self.topic) target = messaging.Target(topic=self.topic, server=self.host) 
endpoints = [self.manager] serializer = objects_base.MasakariObjectSerializer() self.rpcserver = rpc.get_server(target, endpoints, serializer) self.rpcserver.start() if self.periodic_enable: if self.periodic_fuzzy_delay: initial_delay = random.randint(0, self.periodic_fuzzy_delay) else: initial_delay = None self.tg.add_dynamic_timer( self.periodic_tasks, initial_delay=initial_delay, periodic_interval_max=self.periodic_interval_max) def __getattr__(self, key): manager = self.__dict__.get('manager', None) return getattr(manager, key) @classmethod def create(cls, host=None, binary=None, topic=None, manager=None, periodic_enable=None, periodic_fuzzy_delay=None, periodic_interval_max=None): """Instantiates class and passes back application object. :param host: defaults to CONF.host :param binary: defaults to basename of executable :param topic: defaults to bin_name - 'masakari-' part :param manager: defaults to CONF._manager :param periodic_enable: defaults to CONF.periodic_enable :param periodic_fuzzy_delay: defaults to CONF.periodic_fuzzy_delay :param periodic_interval_max: if set, the max time to wait between runs """ if not host: host = CONF.host if not binary: binary = os.path.basename(sys.argv[0]) if not topic: topic = binary.rpartition('masakari-')[2] if not manager: manager_cls = ('%s_manager' % binary.rpartition('masakari-')[2]) manager = CONF.get(manager_cls, None) if periodic_enable is None: periodic_enable = CONF.periodic_enable if periodic_fuzzy_delay is None: periodic_fuzzy_delay = CONF.periodic_fuzzy_delay if periodic_interval_max is None: periodic_interval_max = CONF.periodic_interval_max service_obj = cls(host, binary, topic, manager, periodic_enable=periodic_enable, periodic_fuzzy_delay=periodic_fuzzy_delay, periodic_interval_max=periodic_interval_max) return service_obj def kill(self): """Destroy the service object in the datastore. 
NOTE: Although this method is not used anywhere else than tests, it is convenient to have it here, so the tests might easily and in clean way stop and remove the service_ref. """ self.stop() def stop(self): # Try to shut the connection down, but if we get any sort of # errors, go ahead and ignore them.. as we're shutting down anyway try: self.rpcserver.stop() except Exception: pass super(Service, self).stop() def periodic_tasks(self, raise_on_error=False): """Tasks to be run at a periodic interval.""" ctxt = context.get_admin_context() return self.manager.periodic_tasks(ctxt, raise_on_error=raise_on_error) def basic_config_check(self): """Perform basic config checks before starting processing.""" # Make sure the tempdir exists and is writable try: with utils.tempdir(): pass except Exception as e: LOG.error('Temporary directory is invalid: %s', e) sys.exit(1) def reset(self): self.manager.reset() class WSGIService(service.Service): """Provides ability to launch API from a 'paste' configuration.""" def __init__(self, name, loader=None, use_ssl=False, max_url_len=None, coordination=False): """Initialize, but do not start the WSGI server. :param name: The name of the WSGI server given to the loader. :param loader: Loads the WSGI application using the given name. 
:returns: None """ self.name = name self.binary = 'masakari-%s' % name self.topic = None self.loader = loader or wsgi.Loader() self.app = self.loader.load_app(name) self.host = getattr(CONF, '%s_listen' % name, "0.0.0.0") self.port = getattr(CONF, '%s_listen_port' % name, 0) self.workers = (getattr(CONF, '%s_workers' % name, None) or processutils.get_worker_count()) if self.workers and self.workers < 1: worker_name = '%s_workers' % name msg = (_("%(worker_name)s value of %(workers)s is invalid, " "must be greater than 0") % {'worker_name': worker_name, 'workers': str(self.workers)}) raise exception.InvalidInput(msg) self.use_ssl = use_ssl self.server = wsgi.Server(name, self.app, host=self.host, port=self.port, use_ssl=self.use_ssl, max_url_len=max_url_len) self.coordination = coordination def reset(self): """Reset server greenpool size to default. :returns: None """ self.server.reset() def start(self): """Start serving this service using loaded configuration. Also, retrieve updated port number in case '0' was passed in, which indicates a random port should be used. :returns: None """ if self.coordination: masakari_coordination.COORDINATOR.start() self.server.start() def stop(self): """Stop serving this API. :returns: None """ if self.coordination: try: masakari_coordination.COORDINATOR.stop() except Exception as error: LOG.warning('Error occurred during masakari coordination was ' 'stopped: %s', error) self.server.stop() def wait(self): """Wait for the service to stop serving this API. 
:returns: None """ self.server.wait() def process_launcher(): return service.ProcessLauncher(CONF, restart_method='mutate') # NOTE: the global launcher is to maintain the existing # functionality of calling service.serve + # service.wait _launcher = None def serve(server, workers=None): global _launcher if _launcher: raise RuntimeError(_('serve() can only be called once')) _launcher = service.launch(CONF, server, workers=workers, restart_method='mutate') def wait(): _launcher.wait() ././@PaxHeader0000000000000000000000000000003200000000000010210 xustar0026 mtime=1751923813.52971 masakari-19.1.0.dev18/masakari/tests/0000775000175100017510000000000015033036146016363 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/tests/__init__.py0000664000175100017510000000000015033036143020457 0ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/tests/fixtures.py0000664000175100017510000001430215033036143020603 0ustar00mylesmyles# Copyright (c) 2016 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
"""Fixtures for Masakari tests.""" import warnings import fixtures from oslo_config import cfg from sqlalchemy import exc as sqla_exc from masakari.db import migration from masakari.db.sqlalchemy import api as session from masakari import exception CONF = cfg.CONF DB_SCHEMA = {'main': ""} SESSION_CONFIGURED = False class Timeout(fixtures.Fixture): """Setup per test timeouts. In order to avoid test deadlocks we support setting up a test timeout parameter read from the environment. In almost all cases where the timeout is reached this means a deadlock. A class level TIMEOUT_SCALING_FACTOR also exists, which allows extremely long tests to specify they need more time. """ def __init__(self, timeout, scaling=1): super(Timeout, self).__init__() try: self.test_timeout = int(timeout) except ValueError: # If timeout value is invalid do not set a timeout. self.test_timeout = 0 if scaling >= 1: self.test_timeout *= scaling else: raise ValueError('scaling value must be >= 1') def setUp(self): super(Timeout, self).setUp() if self.test_timeout > 0: self.useFixture(fixtures.Timeout(self.test_timeout, gentle=True)) class BannedDBSchemaOperations(fixtures.Fixture): """Ban some operations for migrations""" def __init__(self, banned_resources=None): super(BannedDBSchemaOperations, self).__init__() self._banned_resources = banned_resources or [] @staticmethod def _explode(resource, op): raise exception.DBNotAllowed( 'Operation %s.%s() is not allowed in a database migration' % ( resource, op)) def setUp(self): super(BannedDBSchemaOperations, self).setUp() for thing in self._banned_resources: self.useFixture(fixtures.MonkeyPatch( 'sqlalchemy.%s.drop' % thing, lambda *a, **k: self._explode(thing, 'drop'))) self.useFixture(fixtures.MonkeyPatch( 'sqlalchemy.%s.alter' % thing, lambda *a, **k: self._explode(thing, 'alter'))) class DatabasePoisonFixture(fixtures.Fixture): def setUp(self): super(DatabasePoisonFixture, self).setUp() self.useFixture(fixtures.MonkeyPatch( 
'oslo_db.sqlalchemy.enginefacade._TransactionFactory.' '_create_session', self._poison_configure)) def _poison_configure(self, *a, **k): raise Exception('This test uses methods that set internal oslo_db ' 'state, but it does not claim to use the database. ' 'This will conflict with the setup of tests that ' 'do use the database and cause failures later.') class Database(fixtures.Fixture): def __init__(self, database='main', connection=None): """Create a database fixture. :param database: The type of database, 'main' :param connection: The connection string to use """ super(Database, self).__init__() global SESSION_CONFIGURED if not SESSION_CONFIGURED: session.configure(CONF) SESSION_CONFIGURED = True self.database = database if connection is not None: ctxt_mgr = session.create_context_manager( connection=connection) facade = ctxt_mgr.get_legacy_facade() self.get_engine = facade.get_engine else: self.get_engine = session.get_engine def _cache_schema(self): global DB_SCHEMA if not DB_SCHEMA[self.database]: engine = self.get_engine() conn = engine.connect() migration.db_sync() DB_SCHEMA[self.database] = "".join(line for line in conn.connection.iterdump()) engine.dispose() def cleanup(self): engine = self.get_engine() engine.dispose() def reset(self): self._cache_schema() engine = self.get_engine() engine.dispose() conn = engine.connect() conn.connection.executescript(DB_SCHEMA[self.database]) def setUp(self): super(Database, self).setUp() self.reset() self.addCleanup(self.cleanup) class WarningsFixture(fixtures.Fixture): """Filters out warnings during test runs.""" def setUp(self): super().setUp() self._original_warning_filters = warnings.filters[:] warnings.simplefilter('once', DeprecationWarning) # The UUIDFields emits a warning if the value is not a valid UUID. # Let's escalate that to an exception in the test to prevent adding # violations. 
warnings.filterwarnings('error', message='.*invalid UUID.*') # Enable deprecation warnings for nova itself to capture upcoming # SQLAlchemy changes warnings.filterwarnings( 'ignore', category=sqla_exc.SADeprecationWarning, ) warnings.filterwarnings( 'error', module='masakari', category=sqla_exc.SADeprecationWarning, ) # Enable general SQLAlchemy warnings also to ensure we're not doing # silly stuff. It's possible that we'll need to filter things out here # with future SQLAlchemy versions, but that's a good thing warnings.filterwarnings( 'error', module='masakari', category=sqla_exc.SAWarning, ) self.addCleanup(self._reset_warning_filters) def _reset_warning_filters(self): warnings.filters[:] = self._original_warning_filters ././@PaxHeader0000000000000000000000000000003200000000000010210 xustar0026 mtime=1751923813.52971 masakari-19.1.0.dev18/masakari/tests/functional/0000775000175100017510000000000015033036146020525 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/tests/functional/__init__.py0000664000175100017510000000000015033036143022621 0ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/tests/functional/base.py0000664000175100017510000000320615033036143022007 0ustar00mylesmyles# Copyright (C) 2019 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import os import sys import openstack from oslotest import base openstack.enable_logging( debug=True, http_debug=True, stream=sys.stdout, format_stream=True, format_template='%(asctime)s %(name)-32s %(message)s', ) #: Defines the OpenStack Client Config (OCC) cloud keys in your OCC config #: file, typically in /etc/openstack/clouds.yaml. That configuration #: will determine where the functional tests will be run and what resource #: defaults will be used to run the functional tests. ADMIN_CLOUD_NAME = os.getenv( "MASAKARI_FUNCTIONAL_TESTS_ADMIN", "devstack-admin" ) USER_CLOUD_NAME = os.getenv("MASAKARI_FUNCTIONAL_TESTS_USER", "devstack") class BaseFunctionalTest(base.BaseTestCase): def setUp(self, ha_api_version="1.0"): super(BaseFunctionalTest, self).setUp() self.admin_conn = openstack.connect( cloud=ADMIN_CLOUD_NAME, ha_api_version=ha_api_version, ) self.conn = openstack.connect(cloud=USER_CLOUD_NAME) self.hypervisors = self.admin_conn.list_hypervisors() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/tests/functional/notification_base.py0000664000175100017510000000746715033036143024572 0ustar00mylesmyles# Copyright (C) 2019 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from oslo_service import loopingcall from masakari.objects import fields from masakari.tests.functional import base class NotificationTestBase(base.BaseFunctionalTest): SERVICE_TYPE = "COMPUTE" HOST_TYPE = "COMPUTE" CONTROL_ATTRIBUTES = "SSH" SERVER_WAIT_INTERVAL = 1 SERVER_WAIT_PERIOD = 300 def setUp(self, ha_api_version="1.0", recovery_method="auto"): super(NotificationTestBase, self).setUp(ha_api_version=ha_api_version) self.recovery_method = recovery_method if not self.hypervisors: self.skipTest("Skip Test as there are no hypervisors " "configured in nova") # Get image and flavor to create server self.image_uuids = [image.id for image in self.conn.compute.images()] self.flavors = [flavor.id for flavor in self.conn.compute.flavors()] if not self.image_uuids: self.skipTest("Skip Test as there are no images " "configured in glance") if not self.flavors: self.skipTest("Skip Test as there are no flavors " "configured in nova") # Create segment self.segment = self.admin_conn.ha.create_segment( name=self.getUniqueString(), recovery_method=self.recovery_method, service_type=self.SERVICE_TYPE) # Create valid host host_name = self.hypervisors[0]['name'] self.host = self.admin_conn.ha.create_host( segment_id=self.segment.uuid, name=host_name, type=self.HOST_TYPE, control_attributes=self.CONTROL_ATTRIBUTES) # Delete segment which delete all hosts associated with it self.addCleanup(self.admin_conn.ha.delete_segment, self.segment.uuid) def check_notification_status(self, notification, wait_interval, wait_period): def wait_for_notification_status_finished(): result = self.admin_conn.ha.get_notification( notification.notification_uuid) if result.status == fields.NotificationStatus.FINISHED: raise loopingcall.LoopingCallDone() timer = loopingcall.FixedIntervalWithTimeoutLoopingCall( wait_for_notification_status_finished) try: timer.start(interval=wait_interval, initial_delay=1, timeout=wait_period).wait() except loopingcall.LoopingCallTimeOut: self.fail("Timed out: Notification 
is not processed and " "it's not in the finished status") def check_server_status(self, server, status): def wait_for_server_status_change(): instance = self.admin_conn.compute.get_server(server.id) if instance.status == status: raise loopingcall.LoopingCallDone() timer = loopingcall.FixedIntervalWithTimeoutLoopingCall( wait_for_server_status_change) try: timer.start(interval=self.SERVER_WAIT_INTERVAL, timeout=self.SERVER_WAIT_PERIOD).wait() except loopingcall.LoopingCallTimeOut: self.fail("Timed out: Instance is not in the expected" " status: %s" % status) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/tests/functional/test_hosts.py0000664000175100017510000001642215033036143023300 0ustar00mylesmyles# Copyright (C) 2019 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import ddt from masakari.objects import fields from masakari.tests.functional import base @ddt.ddt class TestHosts(base.BaseFunctionalTest): def setUp(self): super(TestHosts, self).setUp() if not self.hypervisors: self.skipTest("Skipped as there are no hypervisors " "configured in nova") # Create segment self.segment = self.admin_conn.ha.create_segment( name=self.getUniqueString(), recovery_method=fields.FailoverSegmentRecoveryMethod.AUTO, service_type='COMPUTE') # Delete segment which deletes host/s associated with it self.addCleanup(self.admin_conn.ha.delete_segment, self.segment.uuid) def test_create_get(self): # This test is for testing hosts create/get # Create valid host host_name = self.hypervisors[0]['name'] host_data = {'name': host_name, 'type': 'COMPUTE', 'on_maintenance': False, 'reserved': False, 'control_attributes': 'SSH'} host = self.admin_conn.ha.create_host(self.segment.uuid, **host_data) self.assertEqual(dict(host), {**host_data, **host}) result = self.admin_conn.ha.get_host(host.uuid, self.segment.uuid) self.assertEqual('COMPUTE', result.type) self.assertEqual(False, result.on_maintenance) self.assertEqual(False, result.reserved) self.assertEqual('SSH', result.control_attributes) def test_list(self): # This test is for testing host/s creation and listing the same. expected_hosts = [] for host in self.hypervisors: host_data = { 'name': host.name, 'type': 'COMPUTE', 'on_maintenance': False, 'reserved': False, 'control_attributes': 'SSH', } self.admin_conn.ha.create_host( segment_id=self.segment.uuid, **host_data) # NOTE(yoctozepto): 'failover_segment_id' is added in the API # response. We can verify it here. host_data['failover_segment_id'] = self.segment.uuid expected_hosts.append(host_data) hosts = self.admin_conn.ha.hosts(self.segment.uuid) # NOTE(yoctozepto): We are saving the generator values to a list to # compare the length and then iterate over the elements for comparison. 
hosts = list(hosts) self.assertEqual(len(expected_hosts), len(hosts)) for expected_host in expected_hosts: found = False for host in hosts: found = found or (dict(host, **expected_host) == dict(host)) self.assertEqual(True, found, 'Host not found: {expected_host}'.format( expected_host=expected_host)) @ddt.data( {'on_maintenance': False, 'host_type': 'COMPUTE', 'reserved': False, 'control_attributes': 'SSH'}, {'on_maintenance': True, 'host_type': 'CONTROLLER', 'reserved': True, 'control_attributes': 'TCP'} ) @ddt.unpack def test_create_list_with_filter(self, on_maintenance, host_type, reserved, control_attributes): # This test is for testing host/s creation and listing # the same based on filters. if len(self.hypervisors) == 1: self.skipTest("Skipped as there is only one hypervisor " "configured in nova") host_data_1 = {'name': self.hypervisors[0].name, 'type': 'COMPUTE', 'on_maintenance': False, 'reserved': False, 'control_attributes': 'SSH'} host_data_2 = {'name': self.hypervisors[1].name, 'type': 'CONTROLLER', 'on_maintenance': True, 'reserved': True, 'control_attributes': 'TCP'} self.admin_conn.ha.create_host(self.segment.uuid, **host_data_1) self.admin_conn.ha.create_host(self.segment.uuid, **host_data_2) expected_host_data = {'on_maintenance': on_maintenance, 'type': host_type, 'reserved': reserved, 'control_attributes': control_attributes } # Returns list of hosts based on filters for host in self.admin_conn.ha.hosts(self.segment.uuid, on_maintenance=on_maintenance, type=host_type, reserved=reserved): self.assertEqual(dict(host), {**expected_host_data, **host}) def test_update_get_delete(self): # This test is for updating created host and deletion of same host_name = self.hypervisors[0]['name'] host = self.admin_conn.ha.create_host(segment_id=self.segment.uuid, name=host_name, on_maintenance='False', reserved='False', type='COMPUTE', control_attributes='SSH') self.admin_conn.ha.update_host(host['uuid'], segment_id=self.segment.uuid, on_maintenance='True', 
control_attributes='TCP', reserved='True') result = self.admin_conn.ha.get_host(host.uuid, self.segment.uuid) # Confirm host update self.assertEqual(True, result.on_maintenance) self.assertEqual(True, result.reserved) self.assertEqual('TCP', result.control_attributes) def test_update_host_name(self): # This test is for updating host name if len(self.hypervisors) == 1: self.skipTest("Skipped as there is only one hypervisor " "configured in nova") host = self.admin_conn.ha.create_host(segment_id=self.segment.uuid, name=self.hypervisors[0]['name'], type='COMPUTE', control_attributes='SSH') # Update host name updated_host = self.admin_conn.ha.update_host(host['uuid'], segment_id=self.segment.uuid, name=self.hypervisors[1]['name']) result = self.admin_conn.ha.get_host(host.uuid, self.segment.uuid) self.assertEqual(result.name, updated_host.name) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/tests/functional/test_process_notifications.py0000664000175100017510000001162115033036143026543 0ustar00mylesmyles# Copyright (C) 2019 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from oslo_utils import timeutils from masakari.objects import fields from masakari.tests.functional import notification_base as base class NotificationProcessTestCase(base.NotificationTestBase): NOTIFICATION_TYPE = "PROCESS" NOTIFICATION_WAIT_INTERVAL = 1 NOTIFICATION_WAIT_PERIOD = 600 def setUp(self, ha_api_version="1.0"): super(NotificationProcessTestCase, self).setUp(ha_api_version) def _test_create_notification_event_stopped(self): # Test to create notification for process with 'STOPPED' event type notification = self.admin_conn.ha.create_notification( type=self.NOTIFICATION_TYPE, hostname=self.host.name, generated_time=timeutils.utcnow().replace(microsecond=0), payload={"process_name": "nova-compute", "event": fields.EventType.STOPPED}) self.check_notification_status(notification, self.NOTIFICATION_WAIT_INTERVAL, self.NOTIFICATION_WAIT_PERIOD) notification = self.admin_conn.ha.get_notification( notification.notification_uuid) self.assertEqual(fields.NotificationStatus.FINISHED, notification.status) host = self.admin_conn.ha.get_host(self.host.uuid, self.segment.uuid) self.assertEqual(True, host.on_maintenance) services = self.admin_conn.compute.services() for service in services: if service.binary == 'nova-compute': if service.host == self.host.name: # Enable n-cpu service which is disabled during # DisableComputeNodetask of process recovery notification # created above. 
self.admin_conn.compute.enable_service(service, service.host, service.binary) return notification def _test_create_notification_event_start(self): # Test to create notification for process with 'STARTED' event type notification = self.admin_conn.ha.create_notification( type=self.NOTIFICATION_TYPE, hostname=self.host.name, generated_time=timeutils.utcnow().replace(microsecond=0), payload={"process_name": "nova-compute", "event": fields.EventType.STARTED}) self.check_notification_status(notification, self.NOTIFICATION_WAIT_INTERVAL, self.NOTIFICATION_WAIT_PERIOD) notification = self.admin_conn.ha.get_notification( notification.notification_uuid) self.assertEqual(fields.NotificationStatus.FINISHED, notification.status) return notification def test_create_notification_event_stopped(self): # Test to create notification for process with 'STOPPED' event type self._test_create_notification_event_stopped() def test_create_notification_event_start(self): # Test to create notification for process with 'STARTED' event type self._test_create_notification_event_start() class NotificationProcessTestCase_V1_1(NotificationProcessTestCase): def setUp(self): super(NotificationProcessTestCase, self).setUp("1.1") def test_create_notification_event_stopped(self): # Test to create notification for process with 'STOPPED' event type notification = self._test_create_notification_event_stopped() self.assertIsNotNone(notification.recovery_workflow_details) recovery_details = notification.recovery_workflow_details # check the status of each task is successful for details in recovery_details: self.assertEqual("SUCCESS", details.state) def test_create_notification_event_start(self): # Test to create notification for process with 'STARTED' event type notification = self._test_create_notification_event_start() self.assertIsNotNone(notification.recovery_workflow_details) recovery_details = notification.recovery_workflow_details # check the status of each task is successful for details in 
recovery_details: self.assertEqual("SUCCESS", details.state) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/tests/functional/test_segments.py0000664000175100017510000001453415033036143023767 0ustar00mylesmyles# Copyright (C) 2019 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from openstack import exceptions from masakari.objects import fields from masakari.tests.functional import base class TestSegments(base.BaseFunctionalTest): def test_create_get_delete(self): # This test will create, get and delete a segment segment_data = { 'name': self.getUniqueString(), 'recovery_method': fields.FailoverSegmentRecoveryMethod.AUTO, 'service_type': 'COMPUTE', } segment = self.admin_conn.ha.create_segment(**segment_data) self.assertEqual(dict(segment), {**segment_data, **segment}) result = self.admin_conn.ha.get_segment(segment.uuid) self.assertEqual(segment.name, result.name) self.assertEqual(segment.recovery_method, result.recovery_method) self.assertEqual(segment.service_type, result.service_type) self.admin_conn.ha.delete_segment(segment.uuid) self.assertRaises(exceptions.ResourceNotFound, self.admin_conn.ha.get_segment, segment.uuid) def test_create_delete_with_host(self): # This test is for deleting a segment with hosts if not self.hypervisors: self.skipTest("Skipped as there are no hypervisors " "configured in nova") segment = self.admin_conn.ha.create_segment( 
name=self.getUniqueString(), recovery_method=fields.FailoverSegmentRecoveryMethod.AUTO, service_type='COMPUTE') # Create valid host host_name = self.hypervisors[0]['name'] host = self.admin_conn.ha.create_host(segment_id=segment.uuid, name=host_name, type='COMPUTE', control_attributes='SSH') result = self.admin_conn.ha.get_segment(segment.uuid) self.assertEqual(segment.name, result.name) # Delete segment, which should delete hosts as well self.admin_conn.ha.delete_segment(segment['uuid']) self.assertRaises(exceptions.ResourceNotFound, self.admin_conn.ha.get_segment, segment.uuid) self.assertRaises(exceptions.ResourceNotFound, self.admin_conn.ha.get_host, host.uuid, segment.uuid) def test_list(self): # This test is for listing segments using filters segment_data_1 = {'name': self.getUniqueString(), 'recovery_method': fields.FailoverSegmentRecoveryMethod.AUTO, 'service_type': 'COMPUTE'} segment_data_2 = {'name': self.getUniqueString(), 'recovery_method': fields.FailoverSegmentRecoveryMethod.RESERVED_HOST, 'service_type': 'COMPUTE'} # Create segments segment_1 = self.admin_conn.ha.create_segment(**segment_data_1) segment_2 = self.admin_conn.ha.create_segment(**segment_data_2) # Delete segments self.addCleanup(self.admin_conn.ha.delete_segment, segment_1.uuid) self.addCleanup(self.admin_conn.ha.delete_segment, segment_2.uuid) segments = self.admin_conn.ha.segments() self.assertCountEqual([segment_1, segment_2], segments) def test_list_with_filter(self): # This test is for listing segments using filters segment_data_1 = {'name': self.getUniqueString(), 'recovery_method': fields.FailoverSegmentRecoveryMethod.AUTO, 'service_type': 'COMPUTE'} segment_data_2 = {'name': self.getUniqueString(), 'recovery_method': fields.FailoverSegmentRecoveryMethod.RESERVED_HOST, 'service_type': 'COMPUTE'} # Create segments segment_1 = self.admin_conn.ha.create_segment(**segment_data_1) segment_2 = self.admin_conn.ha.create_segment(**segment_data_2) # Delete segments 
self.addCleanup(self.admin_conn.ha.delete_segment, segment_1.uuid) self.addCleanup(self.admin_conn.ha.delete_segment, segment_2.uuid) for seg_object in self.admin_conn.ha.segments( recovery_method=fields.FailoverSegmentRecoveryMethod.AUTO): self.assertEqual( dict(seg_object), {**segment_data_1, **seg_object}) for seg_object in self.admin_conn.ha.segments( recovery_method=fields.FailoverSegmentRecoveryMethod. RESERVED_HOST): self.assertEqual( dict(seg_object), {**segment_data_2, **seg_object}) def test_update_with_host(self): # This test is for updating segment with host if not self.hypervisors: self.skipTest("Skipped as there are no hypervisors " "configured in nova") segment = self.admin_conn.ha.create_segment( name=self.getUniqueString(), recovery_method=fields.FailoverSegmentRecoveryMethod.AUTO, service_type='COMPUTE') # Delete segment self.addCleanup(self.admin_conn.ha.delete_segment, segment.uuid) # Create valid host host_name = self.hypervisors[0]['name'] self.admin_conn.ha.create_host(segment_id=segment.uuid, name=host_name, type='COMPUTE', control_attributes='SSH') # Update segment segment_1 = self.admin_conn.ha.update_segment(segment.uuid, name=self.getUniqueString(), recovery_method=fields.FailoverSegmentRecoveryMethod.RESERVED_HOST, service_type='CONTROLLER') result = self.admin_conn.ha.get_segment(segment.uuid) self.assertEqual(segment_1.name, result.name) self.assertEqual(segment_1.recovery_method, result.recovery_method) self.assertEqual(segment_1.service_type, result.service_type) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/tests/functional/test_vm_notifications.py0000664000175100017510000000625215033036143025513 0ustar00mylesmyles# Copyright (C) 2019 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_utils import timeutils from masakari.objects import fields from masakari.tests.functional import notification_base as base class NotificationVMTestCase(base.NotificationTestBase): NOTIFICATION_TYPE = "VM" NOTIFICATION_WAIT_INTERVAL = 1 NOTIFICATION_WAIT_PERIOD = 600 def setUp(self, ha_api_version="1.0"): super(NotificationVMTestCase, self).setUp(ha_api_version) def _test_create_notification(self): # need at least api 2.37 to create a server without a network self.conn.compute.default_microversion = "2.37" # Create server server = self.conn.compute.create_server( name='masakari_test', flavorRef=self.flavors[0], imageRef=self.image_uuids[0], networks="none", metadata={'HA_Enabled': 'True'}) self.addCleanup(self.conn.compute.delete_server, server) self.check_server_status(server, 'ACTIVE') self.admin_conn.compute.stop_server(server.id) self.check_server_status(server, 'SHUTOFF') notification = self.admin_conn.ha.create_notification( type=self.NOTIFICATION_TYPE, hostname=self.host.name, generated_time=timeutils.utcnow().replace(microsecond=0), payload={"instance_uuid": server.id, "vir_domain_event": "STOPPED_FAILED", "event": "LIFECYCLE"}) self.check_notification_status(notification, self.NOTIFICATION_WAIT_INTERVAL, self.NOTIFICATION_WAIT_PERIOD) notification = self.admin_conn.instance_ha.get_notification( notification.notification_uuid) result = self.admin_conn.compute.get_server(server.id) self.assertEqual(fields.NotificationStatus.FINISHED, notification.status) self.assertEqual('ACTIVE', result.status) return notification def test_create_notification(self): 
# Test to create notification for VM notification type self._test_create_notification() class NotificationVMTestCase_V1_1(NotificationVMTestCase): def setUp(self): super(NotificationVMTestCase, self).setUp("1.1") def test_create_notification(self): notification = self._test_create_notification() self.assertIsNotNone(notification.recovery_workflow_details) recovery_details = notification.recovery_workflow_details # check the status of each task is successful for details in recovery_details: self.assertEqual("SUCCESS", details.state) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/tests/json_ref.py0000664000175100017510000000453515033036143020546 0ustar00mylesmyles# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import os from oslo_serialization import jsonutils def _resolve_ref(ref, base_path): file_path, _, json_path = ref.partition('#') if json_path: raise NotImplementedError('JSON refs with JSON path after the "#" is ' 'not yet supported') path = os.path.join(base_path, file_path) # binary mode is needed due to bug/1515231 with open(path, 'r+b') as f: ref_value = jsonutils.load(f) base_path = os.path.dirname(path) res = resolve_refs(ref_value, base_path) return res def resolve_refs(obj_with_refs, base_path): if isinstance(obj_with_refs, list): for i, item in enumerate(obj_with_refs): obj_with_refs[i] = resolve_refs(item, base_path) elif isinstance(obj_with_refs, dict): if '$ref' in obj_with_refs.keys(): ref = obj_with_refs.pop('$ref') resolved_ref = _resolve_ref(ref, base_path) # the rest of the ref dict contains overrides for the ref. Resolve # refs in the overrides then apply those overrides recursively # here. resolved_overrides = resolve_refs(obj_with_refs, base_path) _update_dict_recursively(resolved_ref, resolved_overrides) return resolved_ref else: for key, value in obj_with_refs.items(): obj_with_refs[key] = resolve_refs(value, base_path) else: # scalar, nothing to do pass return obj_with_refs def _update_dict_recursively(d, update): """Update dict d recursively with data from dict update""" for k, v in update.items(): if k in d and isinstance(d[k], dict) and isinstance(v, dict): _update_dict_recursively(d[k], v) else: d[k] = v ././@PaxHeader0000000000000000000000000000003200000000000010210 xustar0026 mtime=1751923813.53071 masakari-19.1.0.dev18/masakari/tests/unit/0000775000175100017510000000000015033036146017342 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/tests/unit/__init__.py0000664000175100017510000000151115033036143021446 0ustar00mylesmyles# Copyright 2016 NTT Data # All Rights Reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ :mod:`masakari.tests.unit` -- Masakari Unittests ===================================================== .. automodule:: masakari.tests.unit :platform: Unix """ import eventlet eventlet.monkey_patch(os=False) ././@PaxHeader0000000000000000000000000000003200000000000010210 xustar0026 mtime=1751923813.53071 masakari-19.1.0.dev18/masakari/tests/unit/api/0000775000175100017510000000000015033036146020113 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/tests/unit/api/__init__.py0000664000175100017510000000000015033036143022207 0ustar00mylesmyles././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1751923813.5317101 masakari-19.1.0.dev18/masakari/tests/unit/api/openstack/0000775000175100017510000000000015033036146022102 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/tests/unit/api/openstack/__init__.py0000664000175100017510000000000015033036143024176 0ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/tests/unit/api/openstack/fakes.py0000664000175100017510000001007415033036143023544 0ustar00mylesmyles# Copyright 2016 NTT DATA # All Rights Reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_utils import timeutils import routes import webob.dec from masakari.api import api_version_request as api_version from masakari.api import auth as api_auth from masakari.api import openstack as openstack_api from masakari.api.openstack import ha from masakari.api.openstack.ha import versions from masakari.api.openstack import wsgi as os_wsgi from masakari.api import urlmap from masakari.api import wsgi from masakari import context from masakari.tests import uuidsentinel @webob.dec.wsgify def fake_wsgi(self, req): return self.application def wsgi_app_v1(fake_auth_context=None, init_only=None): inner_app_v1 = ha.APIRouterV1() if fake_auth_context is not None: ctxt = fake_auth_context else: ctxt = context.RequestContext('fake', 'fake', auth_token=True) api_v1 = ( openstack_api.FaultWrapper(api_auth.InjectContext(ctxt, inner_app_v1))) mapper = urlmap.URLMap() mapper['/v1'] = api_v1 mapper['/'] = openstack_api.FaultWrapper(versions.Versions()) return mapper class FakeToken(object): id_count = 0 def __getitem__(self, key): return getattr(self, key) def __init__(self, **kwargs): FakeToken.id_count += 1 self.id = FakeToken.id_count for k, v in kwargs.items(): setattr(self, k, v) class FakeRequestContext(context.RequestContext): def __init__(self, *args, **kwargs): kwargs['auth_token'] = kwargs.get('auth_token', 'fake_auth_token') super(FakeRequestContext, self).__init__(*args, **kwargs) class HTTPRequest(os_wsgi.Request): 
@staticmethod def blank(*args, **kwargs): kwargs['base_url'] = 'http://localhost/v1' use_admin_context = kwargs.pop('use_admin_context', False) project_id = kwargs.pop('project_id', uuidsentinel.fake_project_id) version = kwargs.pop('version', api_version.DEFAULT_API_VERSION) out = os_wsgi.Request.blank(*args, **kwargs) out.environ['masakari.context'] = FakeRequestContext( user_id=uuidsentinel.fake_user_id, project_id=project_id, is_admin=use_admin_context) out.api_version_request = api_version.APIVersionRequest(version) return out class TestRouter(wsgi.Router): def __init__(self, controller, mapper=None): if not mapper: mapper = routes.Mapper() mapper.resource("test", "tests", controller=os_wsgi.Resource(controller)) super(TestRouter, self).__init__(mapper) class FakeAuthDatabase(object): data = {} @staticmethod def auth_token_get(context, token_hash): return FakeAuthDatabase.data.get(token_hash, None) @staticmethod def auth_token_create(context, token): fake_token = FakeToken(created_at=timeutils.utcnow(), **token) FakeAuthDatabase.data[fake_token.token_hash] = fake_token FakeAuthDatabase.data['id_%i' % fake_token.id] = fake_token return fake_token @staticmethod def auth_token_destroy(context, token_id): token = FakeAuthDatabase.data.get('id_%i' % token_id) if token and token.token_hash in FakeAuthDatabase.data: del FakeAuthDatabase.data[token.token_hash] del FakeAuthDatabase.data['id_%i' % token_id] def fake_get_available_languages(): existing_translations = ['en_GB', 'en_AU', 'de', 'zh_CN', 'en_US'] return existing_translations def fake_not_implemented(*args, **kwargs): raise NotImplementedError() ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1751923813.5317101 masakari-19.1.0.dev18/masakari/tests/unit/api/openstack/ha/0000775000175100017510000000000015033036146022472 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 
masakari-19.1.0.dev18/masakari/tests/unit/api/openstack/ha/__init__.py0000664000175100017510000000000015033036143024566 0ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/tests/unit/api/openstack/ha/test_extension_info.py0000664000175100017510000000770615033036143027141 0ustar00mylesmyles# Copyright 2016 NTT DATA # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from unittest import mock from masakari.api.openstack.ha import extension_info from masakari import policy from masakari.tests.unit.api.openstack import fakes from masakari.tests.unit import base class fake_extension(object): def __init__(self, name, alias, description, version): self.name = name self.alias = alias self.__doc__ = description self.version = version fake_extensions = { 'ext1-alias': fake_extension('ext1', 'ext1-alias', 'ext1 description', 1), 'ext2-alias': fake_extension('ext2', 'ext2-alias', 'ext2 description', 2), 'ext3-alias': fake_extension('ext3', 'ext3-alias', 'ext3 description', 1) } simulated_extension_list = { 'segments': fake_extension('Segments', 'segments', 'Segments.', 1), 'hosts': fake_extension('Hosts', 'hosts', 'Hosts.', 2), 'os-fake': fake_extension('Cells', 'os-fake', 'Cells description', 1) } def fake_policy_authorize_selective(context, action, target): return action != 'os_masakari_api:ext1-alias:discoverable' class ExtensionInfoTest(base.NoDBTestCase): def setUp(self): super(ExtensionInfoTest, 
self).setUp() ext_info = extension_info.LoadedExtensionInfo() ext_info.extensions = fake_extensions self.controller = extension_info.ExtensionInfoController(ext_info) def _filter_extensions(self, res_dict): for e in [x for x in res_dict['extensions'] if '-alias' in x['alias']]: self.assertIn(e['alias'], fake_extensions) self.assertEqual(e['name'], fake_extensions[e['alias']].name) self.assertEqual(e['alias'], fake_extensions[e['alias']].alias) self.assertEqual(e['description'], fake_extensions[e['alias']].__doc__) self.assertEqual(e['updated'], "") self.assertEqual(e['links'], []) self.assertEqual(6, len(e)) @mock.patch.object(policy, 'authorize', mock.Mock(return_value=True)) def test_extension_info_list(self): req = fakes.HTTPRequest.blank('/extensions') res_dict = self.controller.index(req) self.assertGreaterEqual(len(res_dict['extensions']), 3) self._filter_extensions(res_dict) @mock.patch.object(policy, 'authorize', mock.Mock(return_value=True)) def test_extension_info_show(self): req = fakes.HTTPRequest.blank('/extensions/ext1-alias') res_dict = self.controller.show(req, 'ext1-alias') self.assertEqual(1, len(res_dict)) self.assertEqual(res_dict['extension']['name'], fake_extensions['ext1-alias'].name) self.assertEqual(res_dict['extension']['alias'], fake_extensions['ext1-alias'].alias) self.assertEqual(res_dict['extension']['description'], fake_extensions['ext1-alias'].__doc__) self.assertEqual(res_dict['extension']['updated'], "") self.assertEqual(res_dict['extension']['links'], []) self.assertEqual(6, len(res_dict['extension'])) @mock.patch.object(policy, 'authorize') def test_extension_info_list_not_all_discoverable(self, mock_authorize): mock_authorize.side_effect = fake_policy_authorize_selective req = fakes.HTTPRequest.blank('/extensions') res_dict = self.controller.index(req) self.assertGreaterEqual(len(res_dict['extensions']), 2) self._filter_extensions(res_dict) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 
mtime=1751923811.0 masakari-19.1.0.dev18/masakari/tests/unit/api/openstack/ha/test_hosts.py0000664000175100017510000005540715033036143025253 0ustar00mylesmyles# Copyright (c) 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Tests for the hosts api.""" from http import HTTPStatus from unittest import mock import ddt from oslo_serialization import jsonutils from webob import exc from masakari.api.openstack.ha import hosts from masakari import exception from masakari.ha import api as ha_api from masakari.objects import base as obj_base from masakari.objects import host as host_obj from masakari.objects import segment as segment_obj from masakari.tests.unit.api.openstack import fakes from masakari.tests.unit import base from masakari.tests.unit import fakes as fakes_data from masakari.tests import uuidsentinel def _make_host_obj(host_dict): return host_obj.Host(**host_dict) def _make_hosts_list(hosts_list): return host_obj.Host(objects=[ _make_host_obj(a) for a in hosts_list]) @ddt.ddt class HostTestCase(base.TestCase): """Test Case for host api.""" bad_request = exception.ValidationError def _set_up(self): self.controller = hosts.HostsController() self.req = fakes.HTTPRequest.blank( '/v1/segments/%s/hosts' % uuidsentinel.fake_segment1, use_admin_context=True) self.context = self.req.environ['masakari.context'] def setUp(self): super(HostTestCase, self).setUp() self._set_up() self.failover_segment = fakes_data.create_fake_failover_segment( 
name="segment1", id=1, description="failover_segment for compute", service_type="COMPUTE", recovery_method="auto", uuid=uuidsentinel.fake_segment ) self.host = fakes_data.create_fake_host( name="host_1", id=1, reserved=False, on_maintenance=False, type="fake", control_attributes="fake-control_attributes", uuid=uuidsentinel.fake_host_1, failover_segment=self.failover_segment ) self.host_2 = fakes_data.create_fake_host( name="host_2", id=2, reserved=False, on_maintenance=False, type="fake", control_attributes="fake-control_attributes", uuid=uuidsentinel.fake_host_2, failover_segment=self.failover_segment ) self.host_list = [self.host, self.host_2] self.host_list_obj = _make_hosts_list(self.host_list) @property def app(self): return fakes.wsgi_app_v1(init_only='os-hosts') def _assert_host_data(self, expected, actual): self.assertTrue(obj_base.obj_equal_prims(expected, actual), "The host objects were not equal") @mock.patch.object(segment_obj.FailoverSegment, 'get_by_uuid') @mock.patch.object(ha_api.HostAPI, 'get_all') def test_index(self, mock_get_all, mock_segment): mock_segment.return_value = mock.Mock() mock_get_all.return_value = self.host_list result = self.controller.index(self.req, uuidsentinel.fake_segment1) result = result['hosts'] self._assert_host_data(self.host_list_obj, _make_hosts_list(result)) @mock.patch.object(segment_obj.FailoverSegment, 'get_by_uuid') @mock.patch.object(ha_api.HostAPI, 'get_all') def test_index_valid_on_maintenance(self, mock_get_all, mock_segment): mock_segment.return_value = mock.Mock() self.host_list[0]['on_maintenance'] = True self.host_list[1]['on_maintenance'] = True mock_get_all.return_value = self.host_list for parameter in ['1', 't', 'true', 'on', 'y', 'yes']: req = fakes.HTTPRequest.blank( '/v1/segments/%s/hosts?on_maintenance=''%s' % ( uuidsentinel.fake_segment1, parameter), use_admin_context=True) result = self.controller.index(req, uuidsentinel.fake_segment1) self.assertIn('hosts', result) for host in result['hosts']: 
self.assertTrue(host['on_maintenance']) self.host_list[0]['on_maintenance'] = False self.host_list[1]['on_maintenance'] = False mock_get_all.return_value = self.host_list for parameter in ['0', 'f', 'false', 'off', 'n', 'no']: req = fakes.HTTPRequest.blank( '/v1/segments/%s/hosts?on_maintenance=''%s' % ( uuidsentinel.fake_segment1, parameter), use_admin_context=True) result = self.controller.index(req, uuidsentinel.fake_segment1) self.assertIn('hosts', result) for host in result['hosts']: self.assertFalse(host['on_maintenance']) @mock.patch.object(segment_obj.FailoverSegment, 'get_by_uuid', return_value=mock.Mock()) def test_index_invalid_on_maintenance(self, mock_segment): req = fakes.HTTPRequest.blank('/v1/segments/%s/hosts?on_maintenance=' 'abcd' % uuidsentinel.fake_segment1, use_admin_context=True) self.assertRaises(exc.HTTPBadRequest, self.controller.index, req, uuidsentinel.fake_segment1) @mock.patch.object(segment_obj.FailoverSegment, 'get_by_uuid') @mock.patch.object(ha_api.HostAPI, 'get_all') def test_index_valid_reserved(self, mock_get_all, mock_segment): self.host_list[0]['reserved'] = True self.host_list[1]['reserved'] = True mock_get_all.return_value = self.host_list for parameter in ['1', 't', 'true', 'on', 'y', 'yes']: req = fakes.HTTPRequest.blank( '/v1/segments/%s/hosts?reserved=''%s' % ( uuidsentinel.fake_segment1, parameter ), use_admin_context=True) result = self.controller.index(req, uuidsentinel.fake_segment1) self.assertIn('hosts', result) for host in result['hosts']: self.assertTrue(host['reserved']) self.host_list[0]['reserved'] = False self.host_list[1]['reserved'] = False mock_get_all.return_value = self.host_list for parameter in ['0', 'f', 'false', 'off', 'n', 'no']: req = fakes.HTTPRequest.blank( '/v1/segments/%s/hosts?reserved=''%s' % ( uuidsentinel.fake_segment1, parameter), use_admin_context=True) result = self.controller.index(req, uuidsentinel.fake_segment1) self.assertIn('hosts', result) for host in result['hosts']: 
self.assertFalse(host['reserved']) @mock.patch.object(segment_obj.FailoverSegment, 'get_by_uuid', return_value=mock.Mock()) def test_index_invalid_reserved(self, mock_segment): req = fakes.HTTPRequest.blank('/v1/segments/%s/hosts?reserved=' 'abcd' % uuidsentinel.fake_segment1, use_admin_context=True) self.assertRaises(exc.HTTPBadRequest, self.controller.index, req, uuidsentinel.fake_segment1) @mock.patch.object(segment_obj.FailoverSegment, 'get_by_uuid') @mock.patch.object(ha_api.HostAPI, 'get_all') def test_index_marker_not_found(self, mock_get_all, mock_segment): req = fakes.HTTPRequest.blank('/v1/segments/%s/hosts?marker=123456' % ( uuidsentinel.fake_segment1), use_admin_context=True) mock_segment.return_value = mock.Mock() mock_get_all.side_effect = exception.MarkerNotFound(marker="123456") self.assertRaises(exc.HTTPBadRequest, self.controller.index, req, uuidsentinel.fake_segment1) def test_get_all_marker_negative(self): req = fakes.HTTPRequest.blank('/v1/segments/%s/hosts?limit=-1' % ( uuidsentinel.fake_segment1), use_admin_context=True) self.assertRaises(exc.HTTPBadRequest, self.controller.index, req, uuidsentinel.fake_segment1) @ddt.data('sort_key', 'sort_dir') @mock.patch.object(segment_obj.FailoverSegment, 'get_by_uuid', return_value=mock.Mock()) def test_index_invalid(self, sort_by, mock_segment): req = fakes.HTTPRequest.blank('/v1/segments/%s/hosts?%s=abcd' % ( uuidsentinel.fake_segment1, sort_by), use_admin_context=True) self.assertRaises(exc.HTTPBadRequest, self.controller.index, req, uuidsentinel.fake_segment1) @ddt.data([exception.MarkerNotFound(marker="123456"), "/v1/segments/%s/hosts?marker=123456", exc.HTTPBadRequest], [exception.FailoverSegmentNotFound( id=uuidsentinel.fake_segment1), "/v1/segments/%s/hosts", exc.HTTPNotFound]) @ddt.unpack @mock.patch.object(segment_obj.FailoverSegment, 'get_by_uuid') @mock.patch.object(ha_api.HostAPI, 'get_all') def test_index_not_found(self, masakari_exc, url, exc, mock_get_all, mock_segment): 
mock_segment.return_value = mock.Mock() mock_get_all.side_effect = masakari_exc req = fakes.HTTPRequest.blank(url % uuidsentinel.fake_segment1, use_admin_context=True) self.assertRaises(exc, self.controller.index, req, uuidsentinel.fake_segment1) @mock.patch.object(ha_api.HostAPI, 'create_host') def test_create(self, mock_create): mock_create.return_value = self.host body = { "host": { "name": "host-1", "type": "fake", "reserved": False, "on_maintenance": False, "control_attributes": "fake-control_attributes" } } result = self.controller.create(self.req, uuidsentinel.fake_segment1, body=body) result = result['host'] self._assert_host_data(self.host, _make_host_obj(result)) @mock.patch('masakari.rpc.get_client') @mock.patch.object(ha_api.HostAPI, 'create_host') def test_create_success_with_201_response_code( self, mock_client, mock_create): body = { "host": { "name": "host-1", "type": "fake", "reserved": False, "on_maintenance": False, "control_attributes": "fake-control_attributes" } } fake_req = self.req fake_req.headers['Content-Type'] = 'application/json' fake_req.method = 'POST' fake_req.body = jsonutils.dump_as_bytes(body) resp = fake_req.get_response(self.app) self.assertEqual(HTTPStatus.CREATED, resp.status_code) @mock.patch.object(ha_api.HostAPI, 'create_host') def test_create_with_duplicate_host_name(self, mock_create): mock_create.side_effect = (exception. 
HostExists(name='host-1')) body = { "host": { "name": "host-1", "type": "fake", "reserved": False, "on_maintenance": False, "control_attributes": "fake-control_attributes" } } self.assertRaises(exc.HTTPConflict, self.controller.create, self.req, uuidsentinel.fake_segment1, body=body) @ddt.data( # no_host {"body": { "name": "host-1", "type": "fake", "reserved": False, "on_maintenance": False, "control_attributes": "fake-control_attributes"}}, # no_name {"body": { "host": { "type": "fake", "reserved": False, "on_maintenance": False, "control_attributes": "fake-control_attributes"}}}, # name_with_leading_trailing_spaces {"body": { "host": { "name": " host-1 ", "type": "fake", "reserved": False, "on_maintenance": False, "control_attributes": "fake-control_attributes"}}}, # null_name {"body": { "host": { "name": "", "type": "fake", "reserved": False, "on_maintenance": False, "control_attributes": "fake-control_attributes"}}}, # name_too_long {"body": { "host": { "name": "host-1" * 255, "type": "fake", "reserved": False, "on_maintenance": False, "control_attributes": "fake-control_attributes"}}}, # extra_invalid_arg {"body": { "host": { "name": "host-1", "type": "fake", "reserved": False, "on_maintenance": False, "control_attributes": "fake-control_attributes", "foo": "bar"}}}, # type too long {"body": { "host": { "name": "host-1", "type": "x" * 256, "reserved": False, "on_maintenance": False, "control_attributes": "fake-control_attributes"}}}, # type special characters {"body": { "host": { "name": "host-1", "type": "x_y", "reserved": False, "on_maintenance": False, "control_attributes": "fake-control_attributes"}}} ) @ddt.unpack def test_create_failure(self, body): self.assertRaises(self.bad_request, self.controller.create, self.req, uuidsentinel.fake_segment1, body=body) @mock.patch.object(ha_api.HostAPI, 'get_host') def test_show(self, mock_get_host): mock_get_host.return_value = self.host result = self.controller.show(self.req, uuidsentinel.fake_segment1, 
uuidsentinel.fake_host_1) result = result['host'] self._assert_host_data(self.host, _make_host_obj(result)) @mock.patch.object(ha_api.HostAPI, 'get_host') def test_show_with_non_existing_id(self, mock_get_host): mock_get_host.side_effect = exception.HostNotFound(id="2") self.assertRaises(exc.HTTPNotFound, self.controller.show, self.req, uuidsentinel.fake_segment1, "2") @mock.patch.object(ha_api.HostAPI, 'get_host') def test_show_non_assigned_failover_segment(self, mock_get_host): mock_get_host.side_effect = exception.HostNotFoundUnderFailoverSegment( host_uuid=uuidsentinel.fake_host_3, segment_uuid=uuidsentinel.fake_segment1) self.assertRaises(exc.HTTPNotFound, self.controller.show, self.req, uuidsentinel.fake_segment1, uuidsentinel.fake_host_3) @ddt.data( {"body": { "host": { "name": "host-1", "type": "fake", "reserved": False, "on_maintenance": False, "control_attributes": "fake-control_attributes"}}}, # only name {"body": {"host": {"name": "host-1"}}} ) @ddt.unpack @mock.patch.object(ha_api.HostAPI, 'update_host') def test_update(self, mock_update_host, body): mock_update_host.return_value = self.host result = self.controller.update(self.req, uuidsentinel.fake_segment1, uuidsentinel.fake_host_1, body=body) result = result['host'] self._assert_host_data(self.host, _make_host_obj(result)) @ddt.data( # no updates {"test_data": {"host": {}}}, # no update key {"test_data": {"asdf": {}}}, # wrong updates {"test_data": {"host": {"name": "disable", "foo": "bar"}}}, # null name {"test_data": {"host": {"name": ""}}}, # name too long {"test_data": {"host": {"name": "x" * 256}}}, # type too long {"test_data": {"host": {"type": "x" * 256}}}, # type with special characters {"test_data": {"host": {"type": "x_y"}}} ) @ddt.unpack def test_update_failure(self, test_data): self.assertRaises(self.bad_request, self.controller.update, self.req, uuidsentinel.fake_segment1, uuidsentinel.fake_host_1, body=test_data) @mock.patch.object(ha_api.HostAPI, 'update_host') def 
test_update_with_non_exising_host(self, mock_update_host): test_data = {"host": {"name": "host11"}} mock_update_host.side_effect = exception.HostNotFound(id="2") self.assertRaises(exc.HTTPNotFound, self.controller.update, self.req, uuidsentinel.fake_segment1, "2", body=test_data) @mock.patch.object(ha_api.HostAPI, 'update_host') def test_update_with_duplicated_name(self, mock_update_host): test_data = {"host": {"name": "host-1"}} mock_update_host.side_effect = exception.HostExists(name="host-1") self.assertRaises(exc.HTTPConflict, self.controller.update, self.req, uuidsentinel.fake_segment1, uuidsentinel.fake_host_1, body=test_data) @mock.patch.object(ha_api.HostAPI, 'update_host') def test_update_non_assigned_failover_segment(self, mock_update_host): test_data = {"host": {"name": "host-1"}} mock_update_host.side_effect = \ exception.HostNotFoundUnderFailoverSegment( host_uuid=uuidsentinel.fake_host_3, segment_uuid=uuidsentinel.fake_segment1) self.assertRaises(exc.HTTPNotFound, self.controller.update, self.req, uuidsentinel.fake_segment1, uuidsentinel.fake_host_3, body=test_data) @mock.patch.object(ha_api.HostAPI, 'delete_host') def test_delete_host(self, mock_delete): self.controller.delete(self.req, uuidsentinel.fake_segment1, uuidsentinel.fake_host_1) self.assertTrue(mock_delete.called) @mock.patch('masakari.rpc.get_client') @mock.patch.object(ha_api.HostAPI, 'delete_host') def test_delete_host_with_204_status(self, mock_client, mock_delete): url = '/v1/segments/%(segment)s/hosts/%(host)s' % { 'segment': uuidsentinel.fake_segment1, 'host': uuidsentinel.fake_host_1 } fake_req = fakes.HTTPRequest.blank(url, use_admin_context=True) fake_req.headers['Content-Type'] = 'application/json' fake_req.method = 'DELETE' resp = fake_req.get_response(self.app) self.assertEqual(HTTPStatus.NO_CONTENT, resp.status_code) @mock.patch.object(ha_api.HostAPI, 'delete_host') def test_delete_host_not_found(self, mock_delete): mock_delete.side_effect = exception.HostNotFound(id="2") 
self.assertRaises(exc.HTTPNotFound, self.controller.delete, self.req, uuidsentinel.fake_segment1, uuidsentinel.fake_host_3) @mock.patch.object(ha_api.HostAPI, 'delete_host') def test_delete_host_not_found_for_failover_segment(self, mock_delete): mock_delete.side_effect = exception.HostNotFoundUnderFailoverSegment( host_uuid=uuidsentinel.fake_host_3, segment_uuid=uuidsentinel.fake_segment1) self.assertRaises(exc.HTTPNotFound, self.controller.delete, self.req, uuidsentinel.fake_segment1, uuidsentinel.fake_host_3) class HostTestCasePolicyNotAuthorized(base.NoDBTestCase): """Test Case for host non admin.""" def _set_up(self): self.controller = hosts.HostsController() self.req = fakes.HTTPRequest.blank( '/v1/segments/%s/hosts' % uuidsentinel.fake_segment1) self.context = self.req.environ['masakari.context'] def setUp(self): super(HostTestCasePolicyNotAuthorized, self).setUp() self._set_up() def _check_rule(self, exc, rule_name): self.assertEqual( "Policy doesn't allow %s to be performed." % rule_name, exc.format_message()) def test_index_no_admin(self): rule_name = "os_masakari_api:os-hosts:index" self.policy.set_rules({rule_name: "project:non_fake"}) exc = self.assertRaises(exception.PolicyNotAuthorized, self.controller.index, self.req, uuidsentinel.fake_segment1) self._check_rule(exc, rule_name) def test_create_no_admin(self): rule_name = "os_masakari_api:os-hosts:create" self.policy.set_rules({rule_name: "project:non_fake"}) body = { "host": { "name": "host-1", "type": "fake", "reserved": False, "on_maintenance": False, "control_attributes": "fake-control_attributes" } } exc = self.assertRaises(exception.PolicyNotAuthorized, self.controller.create, self.req, uuidsentinel.fake_segment1, body=body) self._check_rule(exc, rule_name) def test_show_no_admin(self): rule_name = "os_masakari_api:os-hosts:detail" self.policy.set_rules({rule_name: "project:non_fake"}) exc = self.assertRaises(exception.PolicyNotAuthorized, self.controller.show, self.req, 
uuidsentinel.fake_segment1, uuidsentinel.fake_host_1) self._check_rule(exc, rule_name) def test_update_no_admin(self): rule_name = "os_masakari_api:os-hosts:update" self.policy.set_rules({rule_name: "project:non_fake"}) body = { "host": { "name": "host-1", "type": "fake", "reserved": False, "on_maintenance": False, "control_attributes": "fake-control_attributes", } } exc = self.assertRaises(exception.PolicyNotAuthorized, self.controller.update, self.req, uuidsentinel.fake_segment1, uuidsentinel.fake_host_1, body=body) self._check_rule(exc, rule_name) def test_delete_no_admin(self): rule_name = "os_masakari_api:os-hosts:delete" self.policy.set_rules({rule_name: "project:non_fake"}) exc = self.assertRaises(exception.PolicyNotAuthorized, self.controller.delete, self.req, uuidsentinel.fake_segment1, uuidsentinel.fake_host_1) self._check_rule(exc, rule_name) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/tests/unit/api/openstack/ha/test_notifications.py0000664000175100017510000005251515033036143026761 0ustar00mylesmyles# Copyright (c) 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
"""Tests for the notifications api.""" import copy from http import HTTPStatus from unittest import mock import ddt from oslo_serialization import jsonutils from oslo_utils import timeutils from webob import exc from masakari.api.openstack.ha import notifications from masakari.engine import rpcapi as engine_rpcapi from masakari import exception from masakari.ha import api as ha_api from masakari.objects import base as obj_base from masakari.objects import fields from masakari.objects import notification as notification_obj from masakari.tests.unit.api.openstack import fakes from masakari.tests.unit import base from masakari.tests.unit.objects import test_objects from masakari.tests import uuidsentinel NOW = timeutils.utcnow().replace(microsecond=0) OPTIONAL = ['recovery_workflow_details'] def _make_notification_obj(notification_dict): return notification_obj.Notification(**notification_dict) def _make_notification_progress_details_obj(progress_details): return notification_obj.NotificationProgressDetails(**progress_details) def _make_notifications_list(notifications_list): return notification_obj.Notification(objects=[ _make_notification_obj(a) for a in notifications_list]) NOTIFICATION_DATA = {"type": "VM", "id": 1, "payload": {'event': 'STOPPED', 'host_status': 'NORMAL', 'cluster_status': 'ONLINE'}, "source_host_uuid": uuidsentinel.fake_host, "generated_time": NOW, "status": "running", "notification_uuid": uuidsentinel.fake_notification, "failover_segment_uuid": uuidsentinel.fake_segment, "message": None, "created_at": NOW, "updated_at": None, "deleted_at": None, "deleted": 0 } NOTIFICATION = _make_notification_obj(NOTIFICATION_DATA) RECOVERY_DETAILS = {"progress": 1.0, "state": "SUCCESS", "name": "StopInstanceTask", "progress_details": [ {"timestamp": "2019-03-07 13:54:28", "message": "Stopping instance", "progress": "0.0"}, ]} NOTI_DATA_WITH_DETAILS = copy.deepcopy(NOTIFICATION_DATA) NOTIFICATION_WITH_PROGRESS_DETAILS = _make_notification_obj( 
NOTI_DATA_WITH_DETAILS) RECOVERY_OBJ = _make_notification_progress_details_obj(RECOVERY_DETAILS) NOTIFICATION_WITH_PROGRESS_DETAILS.recovery_workflow_details = [RECOVERY_OBJ] NOTIFICATION_LIST = [ {"type": "VM", "id": 1, "payload": {'event': 'STOPPED', 'host_status': 'NORMAL', 'cluster_status': 'ONLINE'}, "source_host_uuid": uuidsentinel.fake_host, "generated_time": NOW, "status": "running", "notification_uuid": uuidsentinel.fake_notification, "failover_segment_uuid": uuidsentinel.fake_segment, "message": None, "created_at": NOW, "updated_at": None, "deleted_at": None, "deleted": 0}, {"type": "PROCESS", "id": 2, "payload": {'event': 'STOPPED', 'process_name': 'fake_process'}, "source_host_uuid": uuidsentinel.fake_host_1, "generated_time": NOW, "status": "running", "notification_uuid": uuidsentinel.fake_notification1, "failover_segment_uuid": uuidsentinel.fake_segment, "message": None, "created_at": NOW, "updated_at": None, "deleted_at": None, "deleted": 0}, ] NOTIFICATION_LIST = _make_notifications_list(NOTIFICATION_LIST) @ddt.ddt class NotificationTestCase(base.TestCase): """Test Case for notifications api.""" bad_request = exception.ValidationError @mock.patch.object(engine_rpcapi, 'EngineAPI') def setUp(self, mock_rpc): super(NotificationTestCase, self).setUp() self.controller = notifications.NotificationsController() self.req = fakes.HTTPRequest.blank('/v1/notifications', use_admin_context=True) self.context = self.req.environ['masakari.context'] @property def app(self): return fakes.wsgi_app_v1(init_only='os-hosts') def _assert_notification_data(self, expected, actual): self.assertTrue(obj_base.obj_equal_prims(expected, actual), "The notifications objects were not equal") @mock.patch.object(ha_api.NotificationAPI, 'get_all') def test_index(self, mock_get_all): mock_get_all.return_value = NOTIFICATION_LIST result = self.controller.index(self.req) result = result['notifications'] self._assert_notification_data(NOTIFICATION_LIST, _make_notifications_list(result)) 
@ddt.data( # limit negative "limit=-1", # invalid sort key "sort_key=abcd", # invalid sort dir "sort_dir=abcd") def test_index_invalid(self, param): req = fakes.HTTPRequest.blank("/v1/notifications?%s" % param, use_admin_context=True) self.assertRaises(exc.HTTPBadRequest, self.controller.index, req) @mock.patch.object(ha_api.NotificationAPI, 'get_all') def test_index_marker_not_found(self, mock_get_all): fake_request = fakes.HTTPRequest.blank('/v1/notifications?marker=1234', use_admin_context=True) mock_get_all.side_effect = exception.MarkerNotFound(marker="1234") self.assertRaises(exc.HTTPBadRequest, self.controller.index, fake_request) def test_index_invalid_generated_since(self): req = fakes.HTTPRequest.blank('/v1/notifications?generated-since=abcd', use_admin_context=True) self.assertRaises(exc.HTTPBadRequest, self.controller.index, req) @mock.patch.object(ha_api.NotificationAPI, 'get_all') def test_index_valid_generated_since(self, mock_get_all): url = '/v1/notifications?generated-since=%s' % str(NOW) req = fakes.HTTPRequest.blank(url, use_admin_context=True) mock_get_all.return_value = NOTIFICATION_LIST result = self.controller.index(req) result = result['notifications'] self._assert_notification_data(NOTIFICATION_LIST, _make_notifications_list(result)) @mock.patch.object(ha_api.NotificationAPI, 'create_notification') def test_create(self, mock_create): mock_create.return_value = NOTIFICATION result = self.controller.create(self.req, body={ "notification": { "hostname": "fake_host", "payload": { "instance_uuid": uuidsentinel.instance_uuid, "vir_domain_event": "STOPPED_FAILED", "event": "LIFECYCLE" }, "type": "VM", "generated_time": "2016-09-13T09:11:21.656788"}}) result = result['notification'] test_objects.compare_obj(self, result, NOTIFICATION_DATA, allow_missing=OPTIONAL) @mock.patch.object(ha_api.NotificationAPI, 'create_notification') def test_create_process_notification(self, mock_create): mock_create.return_value = NOTIFICATION result = 
self.controller.create(self.req, body={ "notification": { "hostname": "fake_host", "payload": { "process_name": "nova-compute", "event": "STOPPED" }, "type": "PROCESS", "generated_time": "2016-09-13T09:11:21.656788"}}) result = result['notification'] test_objects.compare_obj(self, result, NOTIFICATION_DATA, allow_missing=OPTIONAL) @mock.patch('masakari.rpc.get_client') @mock.patch.object(ha_api.NotificationAPI, 'create_notification') def test_create_success_with_201_response_code( self, mock_client, mock_create): body = { "notification": { "hostname": "fake_host", "payload": { "instance_uuid": uuidsentinel.instance_uuid, "vir_domain_event": "STOPPED_FAILED", "event": "LIFECYCLE" }, "type": "VM", "generated_time": NOW } } fake_req = self.req fake_req.headers['Content-Type'] = 'application/json' fake_req.method = 'POST' fake_req.body = jsonutils.dump_as_bytes(body) resp = fake_req.get_response(self.app) self.assertEqual(HTTPStatus.ACCEPTED, resp.status_code) @mock.patch.object(ha_api.NotificationAPI, 'create_notification') def test_create_host_not_found(self, mock_create): body = { "notification": { "hostname": "fake_host", "payload": { "instance_uuid": uuidsentinel.instance_uuid, "vir_domain_event": "STOPPED_FAILED", "event": "LIFECYCLE" }, "type": "VM", "generated_time": "2016-09-13T09:11:21.656788" } } mock_create.side_effect = exception.HostNotFoundByName( host_name="fake_host") self.assertRaises(exc.HTTPBadRequest, self.controller.create, self.req, body=body) @ddt.data( # invalid type {"body": { "notification": {"hostname": "fake_host", "payload": {"event": "STOPPED", "host_status": "NORMAL", "cluster_status": "ONLINE"}, "type": "Fake", "generated_time": "2016-09-13T09:11:21.656788"}}}, # no notification in body {"body": {"hostname": "fake_host", "payload": {"event": "STOPPED", "host_status": "NORMAL", "cluster_status": "ONLINE"}, "type": "VM", "generated_time": "2016-09-13T09:11:21.656788"}}, # no payload {"body": {"notification": {"hostname": "fake_host", 
"type": "VM", "generated_time": "2016-09-13T09:11:21.656788"}}}, # no hostname {"body": {"notification": {"payload": {"event": "STOPPED", "host_status": "NORMAL", "cluster_status": "ONLINE"}, "type": "VM", "generated_time": "2016-09-13T09:11:21.656788"}}}, # no type {"body": {"notification": {"hostname": "fake_host", "payload": {"event": "STOPPED", "host_status": "NORMAL", "cluster_status": "ONLINE"}, "generated_time": "2016-09-13T09:11:21.656788"}}}, # no generated time {"body": {"notification": {"hostname": "fake_host", "payload": {"event": "STOPPED", "host_status": "NORMAL", "cluster_status": "ONLINE"}, "type": "VM", }}}, # hostname too long {"body": { "notification": {"hostname": "fake_host" * 255, "payload": {"event": "STOPPED", "host_status": "NORMAL", "cluster_status": "ONLINE"}, "type": "VM", "generated_time": "2016-09-13T09:11:21.656788"}}}, # extra invalid args {"body": { "notification": {"hostname": "fake_host", "payload": {"event": "STOPPED", "host_status": "NORMAL", "cluster_status": "ONLINE"}, "type": "VM", "generated_time": "2016-09-13T09:11:21.656788", "invalid_extra": "non_expected_parameter"}}} ) @ddt.unpack def test_create_failure(self, body): self.assertRaises(self.bad_request, self.controller.create, self.req, body=body) @ddt.data( # invalid event for PROCESS type {"params": {"payload": {"event": "invalid", "process_name": "nova-compute"}, "type": fields.NotificationType.PROCESS}}, # invalid event for VM type {"params": {"payload": {"event": "invalid", "host_status": fields.HostStatusType.NORMAL, "cluster_status": fields.ClusterStatusType.ONLINE}, "type": fields.NotificationType.VM}}, # invalid event for HOST_COMPUTE type {"params": {"payload": {"event": "invalid"}, "type": fields.NotificationType.COMPUTE_HOST}}, # empty payload {"params": {"payload": {}, "type": fields.NotificationType.COMPUTE_HOST}}, # empty process_name {"params": {"payload": {"event": fields.EventType.STOPPED, "process_name": ""}, "type": fields.NotificationType.PROCESS}}, 
# process_name too long value {"params": {"payload": {"event": fields.EventType.STOPPED, "process_name": "a" * 4097}, "type": fields.NotificationType.PROCESS}}, # process_name invalid data_type {"params": {"payload": {"event": fields.EventType.STOPPED, "process_name": 123}, "type": fields.NotificationType.PROCESS}} ) @ddt.unpack def test_create_with_invalid_payload(self, params): body = { "notification": {"hostname": "fake_host", "generated_time": "2016-09-13T09:11:21.656788" } } body['notification']['payload'] = params['payload'] body['notification']['type'] = params['type'] self.assertRaises(self.bad_request, self.controller.create, self.req, body=body) @mock.patch.object(ha_api.NotificationAPI, 'create_notification') def test_create_duplicate_notification(self, mock_create_notification): mock_create_notification.side_effect = exception.DuplicateNotification( type="COMPUTE_HOST") body = { "notification": {"hostname": "fake_host", "payload": {"event": "STOPPED", "host_status": "NORMAL", "cluster_status": "ONLINE"}, "type": "COMPUTE_HOST", "generated_time": str(NOW)}} self.assertRaises(exc.HTTPConflict, self.controller.create, self.req, body=body) @mock.patch.object(ha_api.NotificationAPI, 'create_notification') def test_create_host_on_maintenance(self, mock_create_notification): mock_create_notification.side_effect = ( exception.HostOnMaintenanceError(host_name="fake_host")) body = { "notification": {"hostname": "fake_host", "payload": {"event": "STOPPED", "host_status": "NORMAL", "cluster_status": "ONLINE"}, "type": "COMPUTE_HOST", "generated_time": str(NOW)}} self.assertRaises(exc.HTTPConflict, self.controller.create, self.req, body=body) @mock.patch.object(ha_api.NotificationAPI, 'get_notification') def test_show(self, mock_get_notification): mock_get_notification.return_value = NOTIFICATION result = self.controller.show(self.req, uuidsentinel.fake_notification) result = result['notification'] self._assert_notification_data(NOTIFICATION, 
_make_notification_obj(result)) @mock.patch.object(ha_api.NotificationAPI, 'get_notification') def test_show_with_non_existing_uuid(self, mock_get_notification): mock_get_notification.side_effect = exception.NotificationNotFound( id="2") self.assertRaises(exc.HTTPNotFound, self.controller.show, self.req, "2") @ddt.data('DELETE', 'PUT') @mock.patch('masakari.rpc.get_client') def test_delete_and_update_notification(self, method, mock_client): url = '/v1/notifications/%s' % uuidsentinel.fake_notification fake_req = fakes.HTTPRequest.blank(url, use_admin_context=True) fake_req.headers['Content-Type'] = 'application/json' fake_req.method = method resp = fake_req.get_response(self.app) self.assertEqual(HTTPStatus.METHOD_NOT_ALLOWED, resp.status_code) class NotificationCasePolicyNotAuthorized(base.NoDBTestCase): """Test Case for notifications non admin.""" @mock.patch.object(engine_rpcapi, 'EngineAPI') def setUp(self, mock_rpc): super(NotificationCasePolicyNotAuthorized, self).setUp() self.controller = notifications.NotificationsController() self.req = fakes.HTTPRequest.blank('/v1/notifications') self.context = self.req.environ['masakari.context'] def _check_rule(self, exc, rule_name): self.assertEqual( "Policy doesn't allow %s to be performed." 
% rule_name, exc.format_message()) def test_create_no_admin(self): rule_name = "os_masakari_api:notifications:create" self.policy.set_rules({rule_name: "project:non_fake"}) body = { "notification": {"hostname": "fake_host", "payload": {"event": "STOPPED", "host_status": "NORMAL", "cluster_status": "ONLINE"}, "type": "VM", "generated_time": "2016-09-13T09:11:21.656788"}} exc = self.assertRaises(exception.PolicyNotAuthorized, self.controller.create, self.req, body=body) self._check_rule(exc, rule_name) def test_show_no_admin(self): rule_name = "os_masakari_api:notifications:detail" self.policy.set_rules({rule_name: "project:non_fake"}) exc = self.assertRaises(exception.PolicyNotAuthorized, self.controller.show, self.req, uuidsentinel.fake_notification) self._check_rule(exc, rule_name) def test_index_no_admin(self): rule_name = "os_masakari_api:notifications:index" self.policy.set_rules({rule_name: "project:non_fake"}) exc = self.assertRaises(exception.PolicyNotAuthorized, self.controller.index, self.req) self._check_rule(exc, rule_name) class NotificationV1_1_TestCase(NotificationTestCase): """Test Case for notifications api for 1.1 API""" api_version = '1.1' @mock.patch.object(engine_rpcapi, 'EngineAPI') def setUp(self, mock_rpc): super(NotificationV1_1_TestCase, self).setUp() self.controller = notifications.NotificationsController() self.req = fakes.HTTPRequest.blank('/v1/notifications', use_admin_context=True, version=self.api_version) self.context = self.req.environ['masakari.context'] @mock.patch.object(ha_api.NotificationAPI, 'get_notification_recovery_workflow_details') def test_show(self, mock_get_notification_recovery_workflow_details): (mock_get_notification_recovery_workflow_details .return_value) = NOTIFICATION_WITH_PROGRESS_DETAILS result = self.controller.show(self.req, uuidsentinel.fake_notification) result = result['notification'] self.assertCountEqual([RECOVERY_OBJ], result.recovery_workflow_details) 
self._assert_notification_data(NOTIFICATION_WITH_PROGRESS_DETAILS, _make_notification_obj(result)) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/tests/unit/api/openstack/ha/test_segments.py0000664000175100017510000004351215033036143025732 0ustar00mylesmyles# Copyright (c) 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Tests for the failover segment api.""" from http import HTTPStatus from unittest import mock import ddt from oslo_serialization import jsonutils from webob import exc from masakari.api.openstack.ha import segments from masakari import exception from masakari.objects import segment as segment_obj from masakari.tests.unit.api.openstack import fakes from masakari.tests.unit import base from masakari.tests import uuidsentinel def _make_segment_obj(segment_dict): return segment_obj.FailoverSegment(**segment_dict) def _make_segments_list(segments_list): return segment_obj.FailoverSegment(objects=[ _make_segment_obj(a) for a in segments_list]) FAILOVER_SEGMENT_LIST = [ {"name": "segment1", "id": "1", "service_type": "COMPUTE", "recovery_method": "auto", "uuid": uuidsentinel.fake_segment, "description": "failover_segment for compute"}, {"name": "segment2", "id": "2", "service_type": "CINDER", "recovery_method": "reserved_host", "uuid": uuidsentinel.fake_segment2, "description": "failover_segment for cinder"} ] FAILOVER_SEGMENT_LIST = 
_make_segments_list(FAILOVER_SEGMENT_LIST) FAILOVER_SEGMENT = {"name": "segment1", "id": "1", "service_type": "COMPUTE", "recovery_method": "auto", "uuid": uuidsentinel.fake_segment, "description": "failover_segment for compute"} FAILOVER_SEGMENT = _make_segment_obj(FAILOVER_SEGMENT) @ddt.ddt class FailoverSegmentTestCase(base.TestCase): """Test Case for failover segment api.""" bad_request = exception.ValidationError def setUp(self): super(FailoverSegmentTestCase, self).setUp() self.controller = segments.SegmentsController() self.req = fakes.HTTPRequest.blank('/v1/segments', use_admin_context=True) self.context = self.req.environ['masakari.context'] @property def app(self): return fakes.wsgi_app_v1(init_only='segments') @mock.patch('masakari.ha.api.FailoverSegmentAPI.get_all') def test_index(self, mock_get_all): mock_get_all.return_value = FAILOVER_SEGMENT_LIST result = self.controller.index(self.req) result = result['segments'] self.assertEqual(FAILOVER_SEGMENT_LIST, result) @mock.patch('masakari.ha.api.FailoverSegmentAPI.get_all') def test_index_marker_not_found(self, mock_get_all): fake_request = fakes.HTTPRequest.blank('/v1/segments?marker=12345', use_admin_context=True) mock_get_all.side_effect = exception.MarkerNotFound(marker="12345") self.assertRaises(exc.HTTPBadRequest, self.controller.index, fake_request) @ddt.data( # limit negative 'limit=-1', # invalid sort key 'sort_key=abcd', # invalid sort dir 'sort_dir=abcd') def test_index_invalid(self, param): req = fakes.HTTPRequest.blank("/v1/segments?%s" % param, use_admin_context=True) self.assertRaises(exc.HTTPBadRequest, self.controller.index, req) @ddt.data( # simple case {"body": { "segment": { "name": "segment1", "service_type": "COMPUTE", "recovery_method": "auto", "description": "failover_segment for compute"}}}, # empty description {"body": { "segment": { "name": "segment1", "service_type": "COMPUTE", "recovery_method": "auto", "description": ""}}}, # multiline description {"body": { "segment": { 
"name": "segment1", "service_type": "COMPUTE", "recovery_method": "auto", "description": "failover_segment\nfor\ncompute"}}}, ) @ddt.unpack @mock.patch('masakari.ha.api.FailoverSegmentAPI.create_segment') def test_create(self, mock_create, body): mock_create.return_value = FAILOVER_SEGMENT result = self.controller.create(self.req, body=body) mock_create.assert_called_once() args, kwargs = mock_create.call_args self.assertIn(body['segment'], args + tuple(kwargs.values())) result = result['segment'] self.assertEqual(FAILOVER_SEGMENT, result) @mock.patch('masakari.ha.api.FailoverSegmentAPI.create_segment') def test_create_with_duplicate_segment_name(self, mock_create): body = { "segment": { "name": "segment1", "service_type": "COMPUTE", "recovery_method": "auto", "description": "failover_segment for compute" } } mock_create.side_effect = (exception. FailoverSegmentExists(name='segment1')) self.assertRaises(exc.HTTPConflict, self.controller.create, self.req, body=body) @mock.patch('masakari.ha.api.FailoverSegmentAPI.create_segment') def test_create_with_enabled_pre12(self, mock_create): body = { "segment": { "name": "segment1", "service_type": "COMPUTE", "recovery_method": "auto", "description": "failover_segment for compute", "enabled": False } } mock_create.return_value = FAILOVER_SEGMENT self.assertRaises(self.bad_request, self.controller.create, self.req, body=body) mock_create.assert_not_called() @mock.patch('masakari.ha.api.FailoverSegmentAPI.create_segment') def test_create_with_enabled_post12(self, mock_create): body = { "segment": { "name": "segment1", "service_type": "COMPUTE", "recovery_method": "auto", "description": "failover_segment for compute", "enabled": False } } req = fakes.HTTPRequest.blank('/v1/segments', use_admin_context=True, version='1.2') mock_create.return_value = FAILOVER_SEGMENT result = self.controller.create(req, body=body) mock_create.assert_called_once() args, kwargs = mock_create.call_args self.assertIn(body['segment'], args + 
tuple(kwargs.values())) result = result['segment'] self.assertEqual(FAILOVER_SEGMENT, result) @mock.patch('masakari.rpc.get_client') @mock.patch('masakari.ha.api.FailoverSegmentAPI.create_segment') def test_create_success_with_201_response_code( self, mock_client, mock_create): body = { "segment": { "name": "segment1", "service_type": "COMPUTE", "recovery_method": "auto", "description": "failover_segment for compute" } } fake_req = self.req fake_req.headers['Content-Type'] = 'application/json' fake_req.method = 'POST' fake_req.body = jsonutils.dump_as_bytes(body) resp = fake_req.get_response(self.app) self.assertEqual(HTTPStatus.CREATED, resp.status_code) @ddt.data( # no segment {"body": { "name": "segment1", "service_type": "COMPUTE", "recovery_method": "auto", "description": "failover_segment for compute"}}, # no name {"body": { "segment": { "service_type": "COMPUTE", "recovery_method": "auto", "description": "failover_segment for compute"}}}, # name with leading trailing spaces {"body": { "segment": { "name": " segment1 ", "service_type": "COMPUTE", "recovery_method": "auto", "description": "failover_segment for compute"}}}, # null name {"body": { "segment": { "name": "", "service_type": "COMPUTE", "recovery_method": "auto", "description": "failover_segment for compute"}}}, # name too long {"body": { "segment": { "name": "segment1" * 255, "service_type": "COMPUTE", "recovery_method": "auto", "description": "failover_segment for compute"}}}, # extra invalid args {"body": { "segment": { "name": "segment1" * 255, "service_type": "COMPUTE", "recovery_method": "auto", "description": "failover_segment for compute", "foo": "fake_foo"}}}, # description with invalid chars {"body": { "segment": { "name": "segment1", "service_type": "COMPUTE", "recovery_method": "auto", "description": "\x00"}}}, ) @ddt.unpack @mock.patch('masakari.ha.api.FailoverSegmentAPI.create_segment') def test_create_failure(self, mock_create, body): mock_create.return_value = FAILOVER_SEGMENT 
self.assertRaises(self.bad_request, self.controller.create, self.req, body=body) mock_create.assert_not_called() @mock.patch('masakari.ha.api.FailoverSegmentAPI.get_segment') def test_show(self, mock_get_segment): mock_get_segment.return_value = FAILOVER_SEGMENT result = self.controller.show(self.req, uuidsentinel.fake_segment) result = result['segment'] self.assertEqual(FAILOVER_SEGMENT, result) @mock.patch('masakari.ha.api.FailoverSegmentAPI.get_segment') def test_show_with_non_existing_id(self, mock_get_segment): mock_get_segment.side_effect = exception.FailoverSegmentNotFound( id="2") self.assertRaises(exc.HTTPNotFound, self.controller.show, self.req, "2") @ddt.data( {"body": {"segment": {"name": "segment1", "service_type": "COMPUTE", "recovery_method": "auto"}}}, # with name only {"body": {"segment": {"name": "segment1"}}} ) @ddt.unpack @mock.patch('masakari.ha.api.FailoverSegmentAPI.update_segment') def test_update(self, mock_update_segment, body): mock_update_segment.return_value = FAILOVER_SEGMENT result = self.controller.update(self.req, uuidsentinel.fake_segment, body=body) result = result['segment'] self.assertEqual(FAILOVER_SEGMENT, result) @ddt.data( # no updates {"test_data": {"segment": {}}}, # no update key {"test_data": {"asdf": {}}}, # wrong updates {"test_data": {"segment": {"name": "disable", "foo": "bar"}}}, # null name {"test_data": {"segment": {"name": ""}}}, # name too long {"test_data": {"segment": {"name": "x" * 256}}} ) @ddt.unpack def test_update_failure(self, test_data): self.assertRaises(self.bad_request, self.controller.update, self.req, uuidsentinel.fake_segment, body=test_data) @mock.patch('masakari.ha.api.FailoverSegmentAPI.update_segment') def test_update_with_non_exising_segment(self, mock_update_segment): test_data = {"segment": {"name": "segment11"}} mock_update_segment.side_effect = exception.FailoverSegmentNotFound( id="2") self.assertRaises(exc.HTTPNotFound, self.controller.update, self.req, "2", body=test_data) 
@mock.patch('masakari.ha.api.FailoverSegmentAPI.update_segment') def test_update_with_duplicated_name(self, mock_update_segment): test_data = {"segment": {"name": "segment1"}} mock_update_segment.side_effect = exception.FailoverSegmentExists( name="segment1") self.assertRaises(exc.HTTPConflict, self.controller.update, self.req, uuidsentinel.fake_segment, body=test_data) @mock.patch('masakari.ha.api.FailoverSegmentAPI.update_segment') def test_update_with_enabled_pre12(self, mock_update_segment): body = { "segment": { "enabled": False } } mock_update_segment.return_value = FAILOVER_SEGMENT self.assertRaises(self.bad_request, self.controller.create, self.req, body=body) mock_update_segment.assert_not_called() @mock.patch('masakari.ha.api.FailoverSegmentAPI.update_segment') def test_update_with_enabled_post12(self, mock_update_segment): body = { "segment": { "enabled": False } } req = fakes.HTTPRequest.blank('/v1/segments', use_admin_context=True, version='1.2') mock_update_segment.return_value = FAILOVER_SEGMENT result = self.controller.update(req, uuidsentinel.fake_segment, body=body) mock_update_segment.assert_called_once() args, kwargs = mock_update_segment.call_args self.assertIn(body['segment'], args + tuple(kwargs.values())) result = result['segment'] self.assertEqual(FAILOVER_SEGMENT, result) @mock.patch('masakari.ha.api.FailoverSegmentAPI.delete_segment') def test_delete_segment(self, mock_delete): self.controller.delete(self.req, uuidsentinel.fake_segment) self.assertTrue(mock_delete.called) @mock.patch('masakari.ha.api.FailoverSegmentAPI.delete_segment') def test_delete_segment_not_found(self, mock_delete): mock_delete.side_effect = exception.FailoverSegmentNotFound( id=uuidsentinel.fake_segment) self.assertRaises(exc.HTTPNotFound, self.controller.delete, self.req, uuidsentinel.fake_segment) @mock.patch('masakari.rpc.get_client') @mock.patch('masakari.ha.api.FailoverSegmentAPI.delete_segment') def test_delete_segment_with_204_status(self, mock_client, 
mock_delete): url = '/v1/segments/%s' % uuidsentinel.fake_segment fake_req = fakes.HTTPRequest.blank(url, use_admin_context=True) fake_req.headers['Content-Type'] = 'application/json' fake_req.method = 'DELETE' resp = fake_req.get_response(self.app) self.assertEqual(HTTPStatus.NO_CONTENT, resp.status_code) class FailoverSegmentTestCasePolicyNotAuthorized(base.NoDBTestCase): """Test Case for failover segment non admin.""" def setUp(self): super(FailoverSegmentTestCasePolicyNotAuthorized, self).setUp() self.controller = segments.SegmentsController() self.req = fakes.HTTPRequest.blank('/v1/segments') self.context = self.req.environ['masakari.context'] def _check_rule(self, exc, rule_name): self.assertEqual( "Policy doesn't allow %s to be performed." % rule_name, exc.format_message()) def test_index_no_admin(self): rule_name = "os_masakari_api:segments:index" self.policy.set_rules({rule_name: "project:non_fake"}) exc = self.assertRaises(exception.PolicyNotAuthorized, self.controller.index, self.req) self._check_rule(exc, rule_name) def test_create_no_admin(self): rule_name = "os_masakari_api:segments:create" self.policy.set_rules({rule_name: "project:non_fake"}) body = { "segment": { "name": "segment1", "service_type": "COMPUTE", "recovery_method": "auto", "description": "failover_segment for compute" } } exc = self.assertRaises(exception.PolicyNotAuthorized, self.controller.create, self.req, body=body) self._check_rule(exc, rule_name) def test_show_no_admin(self): rule_name = "os_masakari_api:segments:detail" self.policy.set_rules({rule_name: "project:non_fake"}) exc = self.assertRaises(exception.PolicyNotAuthorized, self.controller.show, self.req, uuidsentinel.fake_segment) self._check_rule(exc, rule_name) def test_update_no_admin(self): rule_name = "os_masakari_api:segments:update" self.policy.set_rules({rule_name: "project:non_fake"}) body = { "segment": { "name": "segment1", "service_type": "COMPUTE", "recovery_method": "auto", "description": "failover_segment for 
compute" } } exc = self.assertRaises(exception.PolicyNotAuthorized, self.controller.update, self.req, uuidsentinel.fake_segment, body=body) self._check_rule(exc, rule_name) def test_delete_no_admin(self): rule_name = "os_masakari_api:segments:delete" self.policy.set_rules({rule_name: "project:non_fake"}) exc = self.assertRaises(exception.PolicyNotAuthorized, self.controller.delete, self.req, uuidsentinel.fake_segment) self._check_rule(exc, rule_name) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/tests/unit/api/openstack/ha/test_versions.py0000664000175100017510000002012015033036143025743 0ustar00mylesmyles# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import copy from http import HTTPStatus from unittest import mock from oslo_serialization import jsonutils import webob from masakari.api import api_version_request as avr from masakari.api.openstack.ha.views import versions from masakari.tests.unit.api.openstack import fakes from masakari.tests.unit import base NS = { 'atom': 'http://www.w3.org/2005/Atom', 'ns': 'http://docs.openstack.org/common/api/v1.0' } MAX_API_VERSION = avr.max_api_version().get_string() EXP_LINKS = {'v1.0': {'html': 'http://docs.openstack.org/', }} EXP_VERSIONS = { "v1.0": { "id": "v1.0", "status": "SUPPORTED", "version": "", "min_version": "", "updated": "2011-01-21T11:33:21Z", "links": [ { "rel": "describedby", "type": "text/html", "href": EXP_LINKS['v1.0']['html'], }, ], "media-types": [ { "base": "application/json", "type": "application/vnd.openstack.ha+json;version=1", }, ], }, "v1": { "id": "v1", "status": "CURRENT", "version": MAX_API_VERSION, "min_version": "1.0", "updated": "2013-07-23T11:33:21Z", "links": [ { "rel": "self", "href": "http://localhost/v1/", }, { "rel": "describedby", "type": "text/html", "href": EXP_LINKS['v1.0']['html'], }, ], "media-types": [ { "base": "application/json", "type": "application/vnd.openstack.ha+json;version=1.0", } ], } } def _get_self_href(response): """Extract the URL to self from response data.""" data = jsonutils.loads(response.body) for link in data['versions'][0]['links']: if link['rel'] == 'self': return link['href'] return '' class VersionsViewBuilderTests(base.NoDBTestCase): def test_view_builder(self): base_url = "http://example.org/" version_data = { "v3.2.1": { "id": "3.2.1", "status": "CURRENT", "version": "1", "min_version": "1.0", "updated": "2011-07-18T11:30:00Z", } } expected = { "versions": [ { "id": "3.2.1", "status": "CURRENT", "version": "1", "min_version": "1.0", "updated": "2011-07-18T11:30:00Z", "links": [ { "rel": "self", "href": "http://example.org/v1/", }, ], } ] } builder = versions.ViewBuilder(base_url) output = 
builder.build_versions(version_data) self.assertEqual(expected, output) def _test_view_builder_osapi_ha_link_prefix(self, href=None): base_url = "http://example.org/v1/" if href is None: href = base_url version_data = { "id": "v1", "status": "CURRENT", "version": "1.0", "min_version": "1.0", "updated": "2013-07-23T11:33:21Z", "links": [ { "rel": "describedby", "type": "text/html", "href": EXP_LINKS['v1.0']['html'], } ], "media-types": [ { "base": "application/json", "type": ("application/vnd.openstack." "ha+json;version=1.0") } ], } expected_data = copy.deepcopy(version_data) expected = {'version': expected_data} expected['version']['links'].insert(0, {"rel": "self", "href": href, }) builder = versions.ViewBuilder(base_url) output = builder.build_version(version_data) self.assertEqual(expected, output) def test_view_builder_without_osapi_ha_link_prefix(self): self._test_view_builder_osapi_ha_link_prefix() def test_generate_href(self): base_url = "http://example.org/app/" expected = "http://example.org/app/v1/" builder = versions.ViewBuilder(base_url) actual = builder.generate_href('v1') self.assertEqual(expected, actual) def test_generate_href_unknown(self): base_url = "http://example.org/app/" expected = "http://example.org/app/v1/" builder = versions.ViewBuilder(base_url) actual = builder.generate_href('foo') self.assertEqual(expected, actual) def test_generate_href_with_path(self): path = "random/path" base_url = "http://example.org/app/" expected = "http://example.org/app/v1/%s" % path builder = versions.ViewBuilder(base_url) actual = builder.generate_href("v1", path) self.assertEqual(actual, expected) def test_generate_href_with_empty_path(self): path = "" base_url = "http://example.org/app/" expected = "http://example.org/app/v1/" builder = versions.ViewBuilder(base_url) actual = builder.generate_href("v1", path) self.assertEqual(actual, expected) class VersionsTest(base.NoDBTestCase): exp_versions = copy.deepcopy(EXP_VERSIONS) 
exp_versions['v1.0']['links'].insert(0, { 'href': 'http://localhost/v1/', 'rel': 'self'}, ) @property def wsgi_app(self): return fakes.wsgi_app_v1(init_only=('versions',)) def _test_v1(self, path): req = webob.Request.blank(path) req.accept = "application/json" res = req.get_response(self.wsgi_app) self.assertEqual(200, res.status_int) self.assertEqual("application/json", res.content_type) version = jsonutils.loads(res.body) expected = { "version": { "id": "v1.0", "status": "CURRENT", "version": "1.3", "min_version": "1.0", "updated": "2016-07-01T11:33:21Z", "links": [ { "rel": "self", "href": "http://localhost/v1/", }, { "rel": "describedby", "type": "text/html", "href": "https://docs.openstack.org/", }, ], "media-types": [ { "base": "application/json", "type": "application/" "vnd.openstack.masakari+json;version=1", }, ], }, } self.assertEqual(expected, version) @mock.patch('masakari.rpc.get_client') def test_get_version_1_detail(self, mock_get_client): self._test_v1('/v1/') @mock.patch('masakari.rpc.get_client') def test_get_version_1_detail_no_slash(self, mock_get_client): self._test_v1('/v1') @mock.patch('masakari.rpc.get_client') def test_get_version_1_versions_invalid(self, mock_get_client): req = webob.Request.blank('/v1/versions/1234/foo') req.accept = "application/json" res = req.get_response(self.wsgi_app) self.assertEqual(HTTPStatus.NOT_FOUND, res.status_int) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/tests/unit/api/openstack/ha/test_vmoves.py0000664000175100017510000001772015033036143025426 0ustar00mylesmyles# Copyright(c) 2022 Inspur # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Tests for the vmoves api.""" from unittest import mock import ddt from webob import exc from masakari.api.openstack.ha import vmoves from masakari import exception from masakari.ha import api as ha_api from masakari.objects import base as obj_base from masakari.objects import notification as notification_obj from masakari.objects import vmove as vmove_obj from masakari.tests.unit.api.openstack import fakes from masakari.tests.unit import base from masakari.tests.unit import fakes as fakes_data from masakari.tests import uuidsentinel def _make_vmove_obj(vmove_dict): return vmove_obj.VMove(**vmove_dict) def _make_vmoves_list(vmove_list): return vmove_obj.VMove(objects=[ _make_vmove_obj(a) for a in vmove_list]) @ddt.ddt class VMoveTestCase(base.TestCase): """Test Case for vmove api.""" bad_request = exception.ValidationError def _set_up(self): self.controller = vmoves.VMovesController() self.req = fakes.HTTPRequest.blank( '/v1/notifications/%s/vmoves' % ( uuidsentinel.fake_notification1), use_admin_context=True) self.context = self.req.environ['masakari.context'] def setUp(self): super(VMoveTestCase, self).setUp() self._set_up() self.host_type_notification = fakes_data.create_fake_notification( id=1, type="COMPUTE_HOST", source_host_uuid=uuidsentinel.fake_host_1, status="running", notification_uuid=uuidsentinel.fake_host_type_notification, payload={'event': 'STOPPED', 'host_status': 'NORMAL', 'cluster_status': 'ONLINE'} ) self.vm_type_notification = fakes_data.create_fake_notification( id=1, type="VM", source_host_uuid=uuidsentinel.fake_host_2, status="running", 
notification_uuid=uuidsentinel.fake_vm_type_notification, payload={'event': 'STOPPED', 'host_status': 'NORMAL', 'cluster_status': 'ONLINE'} ) self.vmove_1 = fakes_data.create_fake_vmove( id=1, uuid=uuidsentinel.fake_vmove_1, notification_uuid=self.host_type_notification.notification_uuid, instance_uuid=uuidsentinel.fake_instance_1, instance_name='vm-1', source_host='node01', dest_host='node02', start_time='2022-11-22 14:50:22', end_time="2022-11-22 14:50:35", type="evacuation", status='succeeded', message=None ) self.vmove_2 = fakes_data.create_fake_vmove( id=1, uuid=uuidsentinel.fake_vmove_1, notification_uuid=self.host_type_notification.notification_uuid, instance_uuid=uuidsentinel.fake_instance_1, instance_name='vm-1', source_host='node01', dest_host='node02', start_time="2022-11-22 14:50:23", end_time="2022-11-22 14:50:38", type="evacuation", status='succeeded', message=None ) self.vmove_list = [self.vmove_1, self.vmove_2] self.vmove_list_obj = _make_vmoves_list(self.vmove_list) @property def app(self): return fakes.wsgi_app_v1(init_only='vmoves') def _assert_vmove_data(self, expected, actual): self.assertTrue(obj_base.obj_equal_prims(expected, actual), "The vmove objects were not equal") @mock.patch.object(notification_obj.Notification, 'get_by_uuid') @mock.patch.object(ha_api.VMoveAPI, 'get_all') def test_index(self, mock_get_all, mock_notification): mock_notification.return_value = mock.Mock() mock_get_all.return_value = self.vmove_list result = self.controller.index( self.req, uuidsentinel.fake_host_type_notification) result = result['vmoves'] self._assert_vmove_data(self.vmove_list_obj, _make_vmoves_list(result)) @ddt.data('sort_key', 'sort_dir') @mock.patch.object(notification_obj.Notification, 'get_by_uuid', return_value=mock.Mock()) def test_index_invalid(self, sort_by, mock_notification): req = fakes.HTTPRequest.blank( '/v1/notifications/%s/vmoves?%s=abcd' % ( uuidsentinel.fake_notification, sort_by), use_admin_context=True) 
self.assertRaises(exc.HTTPBadRequest, self.controller.index, req, uuidsentinel.fake_notification1) @mock.patch.object(notification_obj.Notification, 'get_by_uuid') @mock.patch.object(ha_api.VMoveAPI, 'get_all') def test_index_with_valid_notification(self, mock_get_all, mock_notification): mock_notification.return_value = mock.Mock() mock_get_all.side_effect = exception.NotificationWithoutVMoves req = fakes.HTTPRequest.blank('/v1/notifications/%s/vmoves' % ( uuidsentinel.fake_vm_type_notification), use_admin_context=True) self.assertRaises(exc.HTTPBadRequest, self.controller.index, req, uuidsentinel.fake_notification1) @mock.patch.object(ha_api.VMoveAPI, 'get_vmove') def test_show(self, mock_get_vmove): mock_get_vmove.return_value = self.vmove_1 result = self.controller.show(self.req, uuidsentinel.fake_notification1, uuidsentinel.fake_vmove_1) vmove = result['vmove'] self._assert_vmove_data(self.vmove_1, _make_vmove_obj(vmove)) @mock.patch.object(ha_api.VMoveAPI, 'get_vmove') def test_show_with_non_existing_id(self, mock_get_vmove): mock_get_vmove.side_effect = exception.VMoveNotFound(id="2") self.assertRaises(exc.HTTPNotFound, self.controller.show, self.req, uuidsentinel.fake_notification1, "2") class VMoveTestCasePolicyNotAuthorized(base.NoDBTestCase): """Test Case for vmove non admin.""" def _set_up(self): self.controller = vmoves.VMovesController() self.req = fakes.HTTPRequest.blank( '/v1/notifications/%s/vmoves' % ( uuidsentinel.fake_notification1)) self.context = self.req.environ['masakari.context'] def setUp(self): super(VMoveTestCasePolicyNotAuthorized, self).setUp() self._set_up() def _check_rule(self, exc, rule_name): self.assertEqual( "Policy doesn't allow %s to be performed." 
% rule_name, exc.format_message()) def test_index_no_admin(self): rule_name = "os_masakari_api:vmoves:index" self.policy.set_rules({rule_name: "project:non_fake"}) exc = self.assertRaises(exception.PolicyNotAuthorized, self.controller.index, self.req, uuidsentinel.fake_notification1) self._check_rule(exc, rule_name) def test_show_no_admin(self): rule_name = "os_masakari_api:vmoves:detail" self.policy.set_rules({rule_name: "project:non_fake"}) exc = self.assertRaises(exception.PolicyNotAuthorized, self.controller.show, self.req, uuidsentinel.fake_notification1, uuidsentinel.fake_vmove_1) self._check_rule(exc, rule_name) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/tests/unit/api/openstack/test_common.py0000664000175100017510000003420515033036143025004 0ustar00mylesmyles# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Test suites for 'common' code used throughout the OpenStack HTTP API. 
""" from testtools import matchers from unittest import mock import webob from masakari.api.openstack import common from masakari.tests.unit.api.openstack import fakes from masakari.tests.unit import base from masakari.tests import uuidsentinel class MiscFunctionsTest(base.TestCase): def test_remove_trailing_version_from_href(self): fixture = 'http://www.testsite.com/v1.1' expected = 'http://www.testsite.com' actual = common.remove_trailing_version_from_href(fixture) self.assertEqual(actual, expected) def test_remove_trailing_version_from_href_2(self): fixture = 'http://www.testsite.com/ha/v1.1' expected = 'http://www.testsite.com/ha' actual = common.remove_trailing_version_from_href(fixture) self.assertEqual(actual, expected) def test_remove_trailing_version_from_href_3(self): fixture = 'http://www.testsite.com/v1.1/images/v10.5' expected = 'http://www.testsite.com/v1.1/images' actual = common.remove_trailing_version_from_href(fixture) self.assertEqual(actual, expected) def test_remove_trailing_version_from_href_bad_request(self): fixture = 'http://www.testsite.com/v1.1/images' self.assertRaises(ValueError, common.remove_trailing_version_from_href, fixture) def test_remove_trailing_version_from_href_bad_request_2(self): fixture = 'http://www.testsite.com/images/v' self.assertRaises(ValueError, common.remove_trailing_version_from_href, fixture) def test_remove_trailing_version_from_href_bad_request_3(self): fixture = 'http://www.testsite.com/v1.1images' self.assertRaises(ValueError, common.remove_trailing_version_from_href, fixture) class TestCollectionLinks(base.NoDBTestCase): """Tests the _get_collection_links method.""" @mock.patch('masakari.api.openstack.common.ViewBuilder._get_next_link') def test_items_less_than_limit(self, href_link_mock): items = [ {"uuid": "123"} ] req = mock.MagicMock() params = mock.PropertyMock(return_value=dict(limit=10)) type(req).params = params builder = common.ViewBuilder() results = builder._get_collection_links(req, items, 
"ignored", "uuid") self.assertFalse(href_link_mock.called) self.assertThat(results, matchers.HasLength(0)) @mock.patch('masakari.api.openstack.common.ViewBuilder._get_next_link') def test_items_equals_given_limit(self, href_link_mock): items = [ {"uuid": "123"} ] req = mock.MagicMock() params = mock.PropertyMock(return_value=dict(limit=1)) type(req).params = params builder = common.ViewBuilder() results = builder._get_collection_links(req, items, mock.sentinel.coll_key, "uuid") href_link_mock.assert_called_once_with(req, "123", mock.sentinel.coll_key) self.assertThat(results, matchers.HasLength(1)) @mock.patch('masakari.api.openstack.common.ViewBuilder._get_next_link') def test_items_equals_default_limit(self, href_link_mock): items = [ {"uuid": "123"} ] req = mock.MagicMock() params = mock.PropertyMock(return_value=dict()) type(req).params = params self.flags(osapi_max_limit=1) builder = common.ViewBuilder() results = builder._get_collection_links(req, items, mock.sentinel.coll_key, "uuid") href_link_mock.assert_called_once_with(req, "123", mock.sentinel.coll_key) self.assertThat(results, matchers.HasLength(1)) @mock.patch('masakari.api.openstack.common.ViewBuilder._get_next_link') def test_items_equals_default_limit_with_given(self, href_link_mock): items = [ {"uuid": "123"} ] req = mock.MagicMock() # Given limit is greater than default max, only return default max params = mock.PropertyMock(return_value=dict(limit=2)) type(req).params = params self.flags(osapi_max_limit=1) builder = common.ViewBuilder() results = builder._get_collection_links(req, items, mock.sentinel.coll_key, "uuid") href_link_mock.assert_called_once_with(req, "123", mock.sentinel.coll_key) self.assertThat(results, matchers.HasLength(1)) class LinkPrefixTest(base.NoDBTestCase): def test_update_link_prefix(self): vb = common.ViewBuilder() result = vb._update_link_prefix("http://192.168.0.243:24/", "http://127.0.0.1/ha") self.assertEqual("http://127.0.0.1/ha", result) result = 
vb._update_link_prefix("http://foo.x.com/v1", "http://new.prefix.com") self.assertEqual("http://new.prefix.com/v1", result) result = vb._update_link_prefix("http://foo.x.com/v1", "http://new.prefix.com:20455/" "new_extra_prefix") self.assertEqual("http://new.prefix.com:20455/new_extra_prefix/v1", result) class UrlJoinTest(base.NoDBTestCase): def test_url_join(self): pieces = ["one", "two", "three"] joined = common.url_join(*pieces) self.assertEqual("one/two/three", joined) def test_url_join_extra_slashes(self): pieces = ["one/", "/two//", "/three/"] joined = common.url_join(*pieces) self.assertEqual("one/two/three", joined) def test_url_join_trailing_slash(self): pieces = ["one", "two", "three", ""] joined = common.url_join(*pieces) self.assertEqual("one/two/three/", joined) def test_url_join_empty_list(self): pieces = [] joined = common.url_join(*pieces) self.assertEqual("", joined) def test_url_join_single_empty_string(self): pieces = [""] joined = common.url_join(*pieces) self.assertEqual("", joined) def test_url_join_single_slash(self): pieces = ["/"] joined = common.url_join(*pieces) self.assertEqual("", joined) class ViewBuilderLinkTest(base.NoDBTestCase): project_id = uuidsentinel.fake_project_id api_version = "1.0" def setUp(self): super(ViewBuilderLinkTest, self).setUp() self.request = self.req("/%s" % self.project_id) self.vb = common.ViewBuilder() def req(self, url, use_admin_context=False): return fakes.HTTPRequest.blank(url, use_admin_context=use_admin_context, version=self.api_version) def test_get_project_id(self): proj_id = self.vb._get_project_id(self.request) self.assertEqual(self.project_id, proj_id) def test_get_next_link(self): identifier = "identifier" collection = "collection" next_link = self.vb._get_next_link(self.request, identifier, collection) expected = "/".join((self.request.url, "%s?marker=%s" % (collection, identifier))) self.assertEqual(expected, next_link) def test_get_href_link(self): identifier = "identifier" collection = 
"collection" href_link = self.vb._get_href_link(self.request, identifier, collection) expected = "/".join((self.request.url, collection, identifier)) self.assertEqual(expected, href_link) def test_get_bookmark_link(self): identifier = "identifier" collection = "collection" bookmark_link = self.vb._get_bookmark_link(self.request, identifier, collection) bmk_url = ( common.remove_trailing_version_from_href(( self.request.application_url))) expected = "/".join((bmk_url, self.project_id, collection, identifier)) self.assertEqual(expected, bookmark_link) class PaginationParamsTest(base.NoDBTestCase): """Unit tests for the `masakari.api.openstack.common.get_pagination_params` method which takes in a request object and returns 'marker' and 'limit' GET params. """ def test_no_params(self): # Test no params. req = webob.Request.blank('/') self.assertEqual(common.get_pagination_params(req), {}) def test_valid_marker(self): # Test valid marker param. req = webob.Request.blank('/?marker=263abb28-1de6-412f-b00' 'b-f0ee0c4333c2') self.assertEqual(common.get_pagination_params(req), {'marker': '263abb28-1de6-412f-b00b-f0ee0c4333c2'}) def test_valid_limit(self): # Test valid limit param. req = webob.Request.blank('/?limit=10') self.assertEqual(common.get_pagination_params(req), {'limit': 10}) def test_invalid_limit(self): # Test invalid limit param. req = webob.Request.blank('/?limit=-2') self.assertRaises( webob.exc.HTTPBadRequest, common.get_pagination_params, req) def test_valid_limit_and_marker(self): # Test valid limit and marker parameters. marker = '263abb28-1de6-412f-b00b-f0ee0c4333c2' req = webob.Request.blank('/?limit=20&marker=%s' % marker) self.assertEqual(common.get_pagination_params(req), {'marker': marker, 'limit': 20}) def test_valid_page_size(self): # Test valid page_size param. req = webob.Request.blank('/?page_size=10') self.assertEqual(common.get_pagination_params(req), {'page_size': 10}) def test_invalid_page_size(self): # Test invalid page_size param. 
req = webob.Request.blank('/?page_size=-2') self.assertRaises( webob.exc.HTTPBadRequest, common.get_pagination_params, req) def test_valid_limit_and_page_size(self): # Test valid limit and page_size parameters. req = webob.Request.blank('/?limit=20&page_size=5') self.assertEqual(common.get_pagination_params(req), {'page_size': 5, 'limit': 20}) class SortParamTest(base.NoDBTestCase): def test_get_sort_params_defaults(self): # Verifies the default sort key and direction. sort_keys, sort_dirs = common.get_sort_params({}) self.assertEqual(['created_at'], sort_keys) self.assertEqual(['desc'], sort_dirs) def test_get_sort_params_override_defaults(self): # Verifies that the defaults can be overriden. sort_keys, sort_dirs = common.get_sort_params({}, default_key='key1', default_dir='dir1') self.assertEqual(['key1'], sort_keys) self.assertEqual(['dir1'], sort_dirs) sort_keys, sort_dirs = common.get_sort_params({}, default_key=None, default_dir=None) self.assertEqual([], sort_keys) self.assertEqual([], sort_dirs) def test_get_sort_params_single_value(self): # Verifies a single sort key and direction. params = webob.multidict.MultiDict() params.add('sort_key', 'key1') params.add('sort_dir', 'dir1') sort_keys, sort_dirs = common.get_sort_params(params) self.assertEqual(['key1'], sort_keys) self.assertEqual(['dir1'], sort_dirs) def test_get_sort_params_single_with_default(self): # Verifies a single sort value with a default. 
params = webob.multidict.MultiDict() params.add('sort_key', 'key1') sort_keys, sort_dirs = common.get_sort_params(params) self.assertEqual(['key1'], sort_keys) # sort_key was supplied, sort_dir should be defaulted self.assertEqual(['desc'], sort_dirs) params = webob.multidict.MultiDict() params.add('sort_dir', 'dir1') sort_keys, sort_dirs = common.get_sort_params(params) self.assertEqual(['created_at'], sort_keys) # sort_dir was supplied, sort_key should be defaulted self.assertEqual(['dir1'], sort_dirs) def test_get_sort_params_multiple_values(self): # Verifies multiple sort parameter values. params = webob.multidict.MultiDict() params.add('sort_key', 'key1') params.add('sort_key', 'key2') params.add('sort_key', 'key3') params.add('sort_dir', 'dir1') params.add('sort_dir', 'dir2') params.add('sort_dir', 'dir3') sort_keys, sort_dirs = common.get_sort_params(params) self.assertEqual(['key1', 'key2', 'key3'], sort_keys) self.assertEqual(['dir1', 'dir2', 'dir3'], sort_dirs) # Also ensure that the input parameters are not modified sort_key_vals = [] sort_dir_vals = [] while 'sort_key' in params: sort_key_vals.append(params.pop('sort_key')) while 'sort_dir' in params: sort_dir_vals.append(params.pop('sort_dir')) self.assertEqual(['key1', 'key2', 'key3'], sort_key_vals) self.assertEqual(['dir1', 'dir2', 'dir3'], sort_dir_vals) self.assertEqual(0, len(params)) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/tests/unit/api/openstack/test_extensions.py0000664000175100017510000000653715033036143025722 0ustar00mylesmyles# Copyright 2016 NTT DATA # All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from http import HTTPStatus from unittest import mock from oslo_config import cfg import webob.exc from masakari.api.openstack import extensions from masakari.api.openstack import ha from masakari.api.openstack.ha import extension_info from masakari import exception from masakari.tests.unit import base CONF = cfg.CONF class fake_bad_extension(object): name = "fake_bad_extension" alias = "fake-bad" class ExtensionLoadingTestCase(base.NoDBTestCase): @mock.patch('masakari.rpc.get_client') def test_extensions_loaded(self, mock_get_client): app = ha.APIRouterV1() self.assertIn('extensions', app._loaded_extension_info.extensions) def test_check_bad_extension(self): loaded_ext_info = extension_info.LoadedExtensionInfo() self.assertFalse(loaded_ext_info._check_extension(fake_bad_extension)) @mock.patch('masakari.rpc.get_client') @mock.patch('masakari.api.openstack.APIRouterV1._register_resources_list') def test_extensions_inherit(self, mock_register, mock_get_client): app = ha.APIRouterV1() self.assertIn('extensions', app._loaded_extension_info.extensions) ext_no_inherits = mock_register.call_args_list[0][0][0] mock_register.assert_called_with(mock.ANY, mock.ANY) name_list = [ext.obj.alias for ext in ext_no_inherits] self.assertIn('extensions', name_list) def test_extensions_expected_error(self): @extensions.expected_errors(HTTPStatus.NOT_FOUND) def fake_func(): raise webob.exc.HTTPNotFound() self.assertRaises(webob.exc.HTTPNotFound, fake_func) def test_extensions_expected_error_from_list(self): @extensions.expected_errors((HTTPStatus.NOT_FOUND, HTTPStatus.FORBIDDEN)) def 
fake_func(): raise webob.exc.HTTPNotFound() self.assertRaises(webob.exc.HTTPNotFound, fake_func) def test_extensions_unexpected_error(self): @extensions.expected_errors(HTTPStatus.NOT_FOUND) def fake_func(): raise webob.exc.HTTPConflict() self.assertRaises(webob.exc.HTTPInternalServerError, fake_func) def test_extensions_unexpected_error_from_list(self): @extensions.expected_errors((HTTPStatus.NOT_FOUND, HTTPStatus.REQUEST_ENTITY_TOO_LARGE)) def fake_func(): raise webob.exc.HTTPConflict() self.assertRaises(webob.exc.HTTPInternalServerError, fake_func) def test_extensions_unexpected_policy_not_authorized_error(self): @extensions.expected_errors(HTTPStatus.NOT_FOUND) def fake_func(): raise exception.PolicyNotAuthorized(action="foo") self.assertRaises(exception.PolicyNotAuthorized, fake_func) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/tests/unit/api/openstack/test_wsgi.py0000664000175100017510000011164515033036143024471 0ustar00mylesmyles# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from http import HTTPStatus import inspect from unittest import mock import testscenarios import webob from oslo_serialization import jsonutils from masakari.api import api_version_request as api_version from masakari.api.openstack import extensions from masakari.api.openstack import wsgi from masakari.api import versioned_method from masakari import exception from masakari.tests.unit.api.openstack import fakes from masakari.tests.unit import base class MicroversionedTest(testscenarios.WithScenarios, base.NoDBTestCase): header_name = 'OpenStack-API-Version' def _make_microversion_header(self, value): return {self.header_name: 'instance-ha %s' % value} class RequestTest(MicroversionedTest): def test_content_type_missing(self): request = wsgi.Request.blank('/tests/123', method='POST') request.body = b"" self.assertIsNone(request.get_content_type()) def test_content_type_unsupported(self): request = wsgi.Request.blank('/tests/123', method='POST') request.headers["Content-Type"] = "text/html" request.body = b"asdf
" self.assertRaises(exception.InvalidContentType, request.get_content_type) def test_content_type_with_charset(self): request = wsgi.Request.blank('/tests/123') request.headers["Content-Type"] = "application/json; charset=UTF-8" result = request.get_content_type() self.assertEqual(result, "application/json") def test_content_type_accept_default(self): request = wsgi.Request.blank('/tests/123.unsupported') request.headers["Accept"] = "application/unsupported1" result = request.best_match_content_type() self.assertEqual(result, "application/json") def test_from_request(self): self.stub_out('masakari.i18n.get_available_languages', fakes.fake_get_available_languages) request = wsgi.Request.blank('/') accepted = 'bogus;q=1, en-gb;q=0.7,en-us,en;q=0.5,*;q=0.7' request.headers = {'Accept-Language': accepted} self.assertEqual(request.best_match_language(), 'en_US') def test_asterisk(self): # asterisk should match first available if there # are not any other available matches self.stub_out('masakari.i18n.get_available_languages', fakes.fake_get_available_languages) request = wsgi.Request.blank('/') accepted = '*,es;q=0.5' request.headers = {'Accept-Language': accepted} self.assertEqual(request.best_match_language(), 'en_GB') def test_prefix(self): self.stub_out('masakari.i18n.get_available_languages', fakes.fake_get_available_languages) request = wsgi.Request.blank('/') accepted = 'zh' request.headers = {'Accept-Language': accepted} self.assertEqual(request.best_match_language(), 'zh_CN') def test_secondary(self): self.stub_out('masakari.i18n.get_available_languages', fakes.fake_get_available_languages) request = wsgi.Request.blank('/') accepted = 'nn,en-gb;q=0.5' request.headers = {'Accept-Language': accepted} self.assertEqual(request.best_match_language(), 'en_GB') def test_none_found(self): self.stub_out('masakari.i18n.get_available_languages', fakes.fake_get_available_languages) request = wsgi.Request.blank('/') accepted = 'nb-no' request.headers = {'Accept-Language': 
accepted} self.assertIsNone(request.best_match_language()) def test_no_lang_header(self): self.stub_out('masakari.i18n.get_available_languages', fakes.fake_get_available_languages) request = wsgi.Request.blank('/') accepted = '' request.headers = {'Accept-Language': accepted} self.assertIsNone(request.best_match_language()) def test_api_version_request_header_none(self): request = wsgi.Request.blank('/') request.set_api_version_request() self.assertEqual(api_version.APIVersionRequest( api_version.DEFAULT_API_VERSION), request.api_version_request) @mock.patch("masakari.api.api_version_request.max_api_version") def test_api_version_request_header(self, mock_maxver): mock_maxver.return_value = api_version.APIVersionRequest("1.0") request = wsgi.Request.blank('/') request.headers = self._make_microversion_header('1.0') request.set_api_version_request() self.assertEqual(api_version.APIVersionRequest("1.0"), request.api_version_request) def test_api_version_request_header_invalid(self): request = wsgi.Request.blank('/') request.headers = self._make_microversion_header('1.1.1') self.assertRaises(exception.InvalidAPIVersionString, request.set_api_version_request) class ActionDispatcherTest(base.NoDBTestCase): def test_dispatch(self): serializer = wsgi.ActionDispatcher() serializer.create = lambda x: 'pants' self.assertEqual(serializer.dispatch({}, action='create'), 'pants') def test_dispatch_action_None(self): serializer = wsgi.ActionDispatcher() serializer.create = lambda x: 'pants' serializer.default = lambda x: 'trousers' self.assertEqual(serializer.dispatch({}, action=None), 'trousers') def test_dispatch_default(self): serializer = wsgi.ActionDispatcher() serializer.create = lambda x: 'pants' serializer.default = lambda x: 'trousers' self.assertEqual(serializer.dispatch({}, action='update'), 'trousers') class JSONDictSerializerTest(base.NoDBTestCase): def test_json(self): input_dict = dict(segments=dict(a=(2, 3))) expected_json = '{"segments":{"a":[2,3]}}' serializer = 
wsgi.JSONDictSerializer() result = serializer.serialize(input_dict) result = result.replace('\n', '').replace(' ', '') self.assertEqual(result, expected_json) class JSONDeserializerTest(base.NoDBTestCase): def test_json(self): data = """{"a": { "a1": "1", "a2": "2", "bs": ["1", "2", "3", {"c": {"c1": "1"}}], "d": {"e": "1"}, "f": "1"}}""" as_dict = { 'body': { 'a': { 'a1': '1', 'a2': '2', 'bs': ['1', '2', '3', {'c': {'c1': '1'}}], 'd': {'e': '1'}, 'f': '1', }, }, } deserializer = wsgi.JSONDeserializer() self.assertEqual(deserializer.deserialize(data), as_dict) def test_json_valid_utf8(self): data = b"""{"segment": {"recovery_method": "auto", "name": "\xe6\xa6\x82\xe5\xbf\xb5", "service_type": "COMPUTE_HOST" }} """ as_dict = { 'body': { 'segment': {'recovery_method': 'auto', 'name': '\u6982\u5ff5', 'service_type': 'COMPUTE_HOST' } } } deserializer = wsgi.JSONDeserializer() self.assertEqual(deserializer.deserialize(data), as_dict) def test_json_invalid_utf8(self): """Send invalid utf-8 to JSONDeserializer.""" data = b"""{"segment": { "name": "\xf0\x28\x8c\x28", "recovery_method": "auto", "description": "compute hosts with shared storage enabled." 
"service_type": "COMPUTE_HOST"}} """ deserializer = wsgi.JSONDeserializer() self.assertRaises(exception.MalformedRequestBody, deserializer.deserialize, data) class ResourceTest(MicroversionedTest): def get_req_id_header_name(self, request): return 'x-openstack-request-id' def test_resource_call_with_method_get(self): class Controller(object): def index(self, req): return 'success' app = fakes.TestRouter(Controller()) # the default method is GET req = webob.Request.blank('/tests') response = req.get_response(app) self.assertEqual(b'success', response.body) self.assertEqual(response.status_int, HTTPStatus.OK) req.body = b'{"body": {"key": "value"}}' response = req.get_response(app) self.assertEqual(b'success', response.body) self.assertEqual(response.status_int, HTTPStatus.OK) req.content_type = 'application/json' response = req.get_response(app) self.assertEqual(b'success', response.body) self.assertEqual(response.status_int, HTTPStatus.OK) def test_resource_call_with_method_post(self): class Controller(object): @extensions.expected_errors(HTTPStatus.BAD_REQUEST) def create(self, req, body): if expected_body != body: raise exception.ConvertedException( code=HTTPStatus.BAD_REQUEST, explanation="The request body invalid") return "success" # verify the method: POST app = fakes.TestRouter(Controller()) req = webob.Request.blank('/tests', method="POST", content_type='application/json') req.body = b'{"body": {"key": "value"}}' expected_body = {'body': { "key": "value" } } response = req.get_response(app) self.assertEqual(response.status_int, HTTPStatus.OK) self.assertEqual(b'success', response.body) # verify without body expected_body = None req.body = None response = req.get_response(app) self.assertEqual(response.status_int, HTTPStatus.OK) self.assertEqual(b'success', response.body) # the body is validated in the controller expected_body = {'body': None} response = req.get_response(app) expected_unsupported_type_body = {'badRequest': {'message': 'The request body 
invalid', 'code': HTTPStatus.BAD_REQUEST}} self.assertEqual(response.status_int, HTTPStatus.BAD_REQUEST) self.assertEqual(expected_unsupported_type_body, jsonutils.loads(response.body)) def test_resource_call_with_method_put(self): class Controller(object): def update(self, req, id, body): if expected_body != body: msg = "The request body invalid" raise webob.exc.HTTPBadRequest(explanation=msg) return "success" # verify the method: PUT app = fakes.TestRouter(Controller()) req = webob.Request.blank('/tests/test_id', method="PUT", content_type='application/json') req.body = b'{"body": {"key": "value"}}' expected_body = {'body': { "key": "value" } } response = req.get_response(app) self.assertEqual(b'success', response.body) self.assertEqual(response.status_int, HTTPStatus.OK) req.body = None expected_body = None response = req.get_response(app) self.assertEqual(response.status_int, HTTPStatus.OK) # verify no content_type is contained in the request req = webob.Request.blank('/tests/test_id', method="PUT", content_type='application/xml') req.content_type = 'application/xml' req.body = b'{"body": {"key": "value"}}' response = req.get_response(app) expected_unsupported_type_body = {'badMediaType': {'message': 'Unsupported Content-Type', 'code': HTTPStatus.UNSUPPORTED_MEDIA_TYPE}} self.assertEqual(response.status_int, HTTPStatus.UNSUPPORTED_MEDIA_TYPE) self.assertEqual(expected_unsupported_type_body, jsonutils.loads(response.body)) def test_resource_call_with_method_delete(self): class Controller(object): def delete(self, req, id): return "success" # verify the method: DELETE app = fakes.TestRouter(Controller()) req = webob.Request.blank('/tests/test_id', method="DELETE") response = req.get_response(app) self.assertEqual(response.status_int, HTTPStatus.OK) self.assertEqual(b'success', response.body) # ignore the body req.body = b'{"body": {"key": "value"}}' response = req.get_response(app) self.assertEqual(response.status_int, HTTPStatus.OK) self.assertEqual(b'success', 
response.body) def test_resource_not_authorized(self): class Controller(object): def index(self, req): raise exception.Forbidden() req = webob.Request.blank('/tests') app = fakes.TestRouter(Controller()) response = req.get_response(app) self.assertEqual(response.status_int, HTTPStatus.FORBIDDEN) def test_dispatch(self): class Controller(object): def index(self, req, pants=None): return pants controller = Controller() resource = wsgi.Resource(controller) method, extensions = resource.get_method(None, 'index', None, '') actual = resource.dispatch(method, None, {'pants': 'off'}) expected = 'off' self.assertEqual(actual, expected) def test_get_method_unknown_controller_method(self): class Controller(object): def index(self, req, pants=None): return pants controller = Controller() resource = wsgi.Resource(controller) self.assertRaises(AttributeError, resource.get_method, None, 'create', None, '') def test_get_method_action_json(self): class Controller(wsgi.Controller): @wsgi.action('fooAction') def _action_foo(self, req, id, body): return body controller = Controller() resource = wsgi.Resource(controller) method, extensions = resource.get_method(None, 'action', 'application/json', '{"fooAction": true}') self.assertEqual(controller._action_foo, method) def test_get_method_action_bad_body(self): class Controller(wsgi.Controller): @wsgi.action('fooAction') def _action_foo(self, req, id, body): return body controller = Controller() resource = wsgi.Resource(controller) self.assertRaises(exception.MalformedRequestBody, resource.get_method, None, 'action', 'application/json', '{}') def test_get_method_unknown_controller_action(self): class Controller(wsgi.Controller): @wsgi.action('fooAction') def _action_foo(self, req, id, body): return body controller = Controller() resource = wsgi.Resource(controller) self.assertRaises(KeyError, resource.get_method, None, 'action', 'application/json', '{"barAction": true}') def test_get_method_action_method(self): class Controller(object): 
def action(self, req, pants=None): return pants controller = Controller() resource = wsgi.Resource(controller) method, extensions = resource.get_method(None, 'action', 'application/xml', 'true,created_at=,deleted=,' 'deleted_at=,foo=123,missing=,' 'mutable_default=,readonly=,rel_object=,' 'rel_objects=,updated_at=)', repr(obj)) def test_obj_make_obj_compatible(self): subobj = MyOwnedObject(baz=1) subobj.VERSION = '1.2' obj = MyObj(rel_object=subobj) obj.obj_relationships = { 'rel_object': [('1.5', '1.1'), ('1.7', '1.2')], } orig_primitive = obj.obj_to_primitive()['masakari_object.data'] with mock.patch.object(subobj, 'obj_make_compatible') as mock_compat: primitive = copy.deepcopy(orig_primitive) obj._obj_make_obj_compatible(primitive, '1.8', 'rel_object') self.assertFalse(mock_compat.called) with mock.patch.object(subobj, 'obj_make_compatible') as mock_compat: primitive = copy.deepcopy(orig_primitive) obj._obj_make_obj_compatible(primitive, '1.7', 'rel_object') mock_compat.assert_called_once_with( primitive['rel_object']['masakari_object.data'], '1.2') with mock.patch.object(subobj, 'obj_make_compatible') as mock_compat: primitive = copy.deepcopy(orig_primitive) obj._obj_make_obj_compatible(primitive, '1.6', 'rel_object') mock_compat.assert_called_once_with( primitive['rel_object']['masakari_object.data'], '1.1') self.assertEqual('1.1', primitive[ 'rel_object']['masakari_object.version']) with mock.patch.object(subobj, 'obj_make_compatible') as mock_compat: primitive = copy.deepcopy(orig_primitive) obj._obj_make_obj_compatible(primitive, '1.5', 'rel_object') mock_compat.assert_called_once_with( primitive['rel_object']['masakari_object.data'], '1.1') self.assertEqual('1.1', primitive[ 'rel_object']['masakari_object.version']) with mock.patch.object(subobj, 'obj_make_compatible') as mock_compat: primitive = copy.deepcopy(orig_primitive) obj._obj_make_obj_compatible(primitive, '1.4', 'rel_object') self.assertFalse(mock_compat.called) self.assertNotIn('rel_object', 
primitive) def test_obj_make_compatible_hits_sub_objects(self): subobj = MyOwnedObject(baz=1) obj = MyObj(foo=123, rel_object=subobj) obj.obj_relationships = {'rel_object': [('1.0', '1.0')]} with mock.patch.object(obj, '_obj_make_obj_compatible') as mock_compat: obj.obj_make_compatible({'rel_object': 'foo'}, '1.10') mock_compat.assert_called_once_with({'rel_object': 'foo'}, '1.10', 'rel_object') def test_obj_make_compatible_skips_unset_sub_objects(self): obj = MyObj(foo=123) obj.obj_relationships = {'rel_object': [('1.0', '1.0')]} with mock.patch.object(obj, '_obj_make_obj_compatible') as mock_compat: obj.obj_make_compatible({'rel_object': 'foo'}, '1.10') self.assertFalse(mock_compat.called) def test_obj_make_compatible_doesnt_skip_falsey_sub_objects(self): @base.MasakariObjectRegistry.register_if(False) class MyList(base.ObjectListBase, base.MasakariObject): VERSION = '1.2' fields = {'objects': fields.ListOfObjectsField('MyObjElement')} obj_relationships = { 'objects': [('1.1', '1.1'), ('1.2', '1.2')], } mylist = MyList(objects=[]) @base.MasakariObjectRegistry.register_if(False) class MyOwner(base.MasakariObject): VERSION = '1.2' fields = {'mylist': fields.ObjectField('MyList')} obj_relationships = { 'mylist': [('1.1', '1.1')], } myowner = MyOwner(mylist=mylist) primitive = myowner.obj_to_primitive('1.1') self.assertIn('mylist', primitive['masakari_object.data']) def test_obj_make_compatible_handles_list_of_objects(self): subobj = MyOwnedObject(baz=1) obj = MyObj(rel_objects=[subobj]) obj.obj_relationships = {'rel_objects': [('1.0', '1.123')]} def fake_make_compat(primitive, version): self.assertEqual('1.123', version) self.assertIn('baz', primitive) with mock.patch.object(subobj, 'obj_make_compatible') as mock_mc: mock_mc.side_effect = fake_make_compat obj.obj_to_primitive('1.0') self.assertTrue(mock_mc.called) def test_delattr(self): obj = MyObj(bar='foo') del obj.bar # Should appear unset now self.assertFalse(obj.obj_attr_is_set('bar')) # Make sure post-delete, 
references trigger lazy loads self.assertEqual('loaded!', getattr(obj, 'bar')) def test_delattr_unset(self): obj = MyObj() self.assertRaises(AttributeError, delattr, obj, 'bar') class TestObject(_LocalTest, _TestObject): def test_set_defaults(self): obj = MyObj() obj.obj_set_defaults('foo') self.assertTrue(obj.obj_attr_is_set('foo')) self.assertEqual(1, obj.foo) def test_set_defaults_no_default(self): obj = MyObj() self.assertRaises(ovo_exc.ObjectActionError, obj.obj_set_defaults, 'bar') def test_set_all_defaults(self): obj = MyObj() obj.obj_set_defaults() self.assertEqual(set(['deleted', 'foo', 'mutable_default']), obj.obj_what_changed()) self.assertEqual(1, obj.foo) def test_set_defaults_not_overwrite(self): obj = MyObj(deleted=True) obj.obj_set_defaults() self.assertEqual(1, obj.foo) self.assertTrue(obj.deleted) class TestRegistry(test_base.NoDBTestCase): @mock.patch('masakari.objects.base.objects') def test_hook_chooses_newer_properly(self, mock_objects): del mock_objects.MyObj reg = base.MasakariObjectRegistry() reg.registration_hook(MyObj, 0) class MyNewerObj(object): VERSION = '1.123' @classmethod def obj_name(cls): return 'MyObj' self.assertEqual(MyObj, mock_objects.MyObj) reg.registration_hook(MyNewerObj, 0) self.assertEqual(MyNewerObj, mock_objects.MyObj) @mock.patch('masakari.objects.base.objects') def test_hook_keeps_newer_properly(self, mock_objects): del mock_objects.MyObj reg = base.MasakariObjectRegistry() reg.registration_hook(MyObj, 0) class MyOlderObj(object): VERSION = '1.1' @classmethod def obj_name(cls): return 'MyObj' self.assertEqual(MyObj, mock_objects.MyObj) reg.registration_hook(MyOlderObj, 0) self.assertEqual(MyObj, mock_objects.MyObj) # NOTE(Dinesh_Bhor): The hashes in this list should only be changed if # they come with a corresponding version bump in the affected # objects object_data = { 'FailoverSegment': '1.1-9cecc07c111f647b32d560f19f1f5db9', 'FailoverSegmentList': '1.0-dfc5c6f5704d24dcaa37b0bbb03cbe60', 'Host': 
'1.2-f05735b156b687bc916d46b551bc45e3', 'HostList': '1.0-25ebe1b17fbd9f114fae8b6a10d198c0', 'Notification': '1.2-d59495957ac67ee9863863d92def4178', 'NotificationProgressDetails': '1.0-fc611ac932b719fbc154dbe34bb8edee', 'NotificationList': '1.0-25ebe1b17fbd9f114fae8b6a10d198c0', 'EventType': '1.0-d1d2010a7391fa109f0868d964152607', 'ExceptionNotification': '1.0-1187e93f564c5cca692db76a66cda2a6', 'ExceptionPayload': '1.0-96f178a12691e3ef0d8e3188fc481b90', 'HostApiNotification': '1.0-1187e93f564c5cca692db76a66cda2a6', 'HostApiPayload': '1.0-20603e23a06477975b860459ba684d34', 'HostApiPayloadBase': '1.1-e02e80de08d0b584ee7a0a0320bfd029', 'NotificationApiPayload': '1.0-c050869a1f4aed23e7645bd4d1830ecd', 'NotificationApiPayloadBase': '1.0-cda8d53a77e64f83e3782fc9c4d499bb', 'NotificationApiNotification': '1.0-1187e93f564c5cca692db76a66cda2a6', 'NotificationPublisher': '1.0-bbbc1402fb0e443a3eb227cc52b61545', 'MyObj': '1.6-ee7b607402fbfb3390a92ab7199e0d88', 'MyOwnedObject': '1.0-fec853730bd02d54cc32771dd67f08a0', 'SegmentApiNotification': '1.0-1187e93f564c5cca692db76a66cda2a6', 'SegmentApiPayload': '1.1-e34e1c772e16e9ad492067ee98607b1d', 'SegmentApiPayloadBase': '1.1-6a1db76f3e825f92196fc1a11508d886', 'VMove': '1.0-5c4d8667b5612b8a49adc065f8961aa2', 'VMoveList': '1.0-63fff36dee683c7a1555798cb233ad3f' } def get_masakari_objects(): """Get masakari versioned objects This returns a dict of versioned objects which are in the Masakari project namespace only. 
ie excludes objects from os-vif and other 3rd party modules :return: a dict mapping class names to lists of versioned objects """ all_classes = base.MasakariObjectRegistry.obj_classes() masakari_classes = {} for name in all_classes: objclasses = all_classes[name] if (objclasses[0].OBJ_PROJECT_NAMESPACE != ( base.MasakariObject.OBJ_PROJECT_NAMESPACE)): continue masakari_classes[name] = objclasses return masakari_classes class TestObjectVersions(test_base.NoDBTestCase, _BaseTestCase): def setUp(self): super(test_base.NoDBTestCase, self).setUp() base.MasakariObjectRegistry.register_notification_objects() def test_versions(self): checker = fixture.ObjectVersionChecker( get_masakari_objects()) fingerprints = checker.get_hashes() if os.getenv('GENERATE_HASHES'): open('object_hashes.txt', 'w').write( pprint.pformat(fingerprints)) raise base.TestingException( 'Generated hashes in object_hashes.txt') expected, actual = checker.test_hashes(object_data) self.assertEqual(expected, actual, 'Some objects have changed; please make sure the ' 'versions have been bumped, and then update their ' 'hashes here.') def test_obj_make_compatible(self): base.MasakariObjectRegistry.register(segment.FailoverSegment) # Iterate all object classes and verify that we can run # obj_make_compatible with every older version than current. # This doesn't actually test the data conversions, but it at least # makes sure the method doesn't blow up on something basic like # expecting the wrong version format. # Hold a dictionary of args/kwargs that need to get passed into # __init__() for specific classes. The key in the dictionary is # the obj_class that needs the init args/kwargs. 
init_args = fake_args.init_args init_kwargs = fake_args.init_kwargs checker = fixture.ObjectVersionChecker( base.MasakariObjectRegistry.obj_classes()) checker.test_compatibility_routines(use_manifest=True, init_args=init_args, init_kwargs=init_kwargs) def test_list_obj_make_compatible(self): @base.MasakariObjectRegistry.register_if(False) class TestObj(base.MasakariObject): VERSION = '1.4' fields = {'foo': fields.IntegerField()} @base.MasakariObjectRegistry.register_if(False) class TestListObj(base.ObjectListBase, base.MasakariObject): VERSION = '1.5' fields = {'objects': fields.ListOfObjectsField('TestObj')} obj_relationships = { 'objects': [('1.0', '1.1'), ('1.1', '1.2'), ('1.3', '1.3'), ('1.5', '1.4')] } my_list = TestListObj() my_obj = TestObj(foo=1) my_list.objects = [my_obj] primitive = my_list.obj_to_primitive(target_version='1.5') primitive_data = primitive['masakari_object.data'] obj_primitive = my_obj.obj_to_primitive(target_version='1.4') obj_primitive_data = obj_primitive['masakari_object.data'] with mock.patch.object(TestObj, 'obj_make_compatible') as comp: my_list.obj_make_compatible(primitive_data, '1.1') comp.assert_called_with(obj_primitive_data, '1.2') def test_list_obj_make_compatible_when_no_objects(self): # Test to make sure obj_make_compatible works with no 'objects' # If a List object ever has a version that did not contain the # 'objects' key, we need to make sure converting back to that version # doesn't cause backporting problems. 
@base.MasakariObjectRegistry.register_if(False) class TestObj(base.MasakariObject): VERSION = '1.1' fields = {'foo': fields.IntegerField()} @base.MasakariObjectRegistry.register_if(False) class TestListObj(base.ObjectListBase, base.MasakariObject): VERSION = '1.1' fields = {'objects': fields.ListOfObjectsField('TestObj')} obj_relationships = { 'objects': [('1.1', '1.1')] } my_list = TestListObj() my_list.objects = [TestObj(foo=1)] primitive = my_list.obj_to_primitive(target_version='1.1') primitive_data = primitive['masakari_object.data'] my_list.obj_make_compatible(primitive_data, target_version='1.0') self.assertNotIn('objects', primitive_data, "List was backported to before 'objects' existed." " 'objects' should not be in the primitive.") class TestObjEqualPrims(_BaseTestCase): def test_object_equal(self): obj1 = MyObj(foo=1, bar='goodbye') obj1.obj_reset_changes() obj2 = MyObj(foo=1, bar='goodbye') obj2.obj_reset_changes() obj2.bar = 'goodbye' # obj2 will be marked with field 'three' updated self.assertTrue(base.obj_equal_prims(obj1, obj2), "Objects that differ only because one a is marked " "as updated should be equal") def test_object_not_equal(self): obj1 = MyObj(foo=1, bar='goodbye') obj1.obj_reset_changes() obj2 = MyObj(foo=1, bar='hello') obj2.obj_reset_changes() self.assertFalse(base.obj_equal_prims(obj1, obj2), "Objects that differ in any field " "should not be equal") def test_object_ignore_equal(self): obj1 = MyObj(foo=1, bar='goodbye') obj1.obj_reset_changes() obj2 = MyObj(foo=1, bar='hello') obj2.obj_reset_changes() self.assertTrue(base.obj_equal_prims(obj1, obj2, ['bar']), "Objects that only differ in an ignored field " "should be equal") class TestObjMethodOverrides(test_base.NoDBTestCase): def test_obj_reset_changes(self): args = inspect.getfullargspec(base.MasakariObject.obj_reset_changes) obj_classes = base.MasakariObjectRegistry.obj_classes() for obj_name in obj_classes: obj_class = obj_classes[obj_name][0] self.assertEqual(args, 
inspect.getfullargspec(obj_class.obj_reset_changes)) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/tests/unit/objects/test_segments.py0000664000175100017510000003106315033036143024231 0ustar00mylesmyles# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import copy from unittest import mock from oslo_utils import timeutils from masakari.api import utils as api_utils from masakari import exception from masakari.objects import fields from masakari.objects import segment from masakari.tests.unit.objects import test_objects from masakari.tests import uuidsentinel NOW = timeutils.utcnow().replace(microsecond=0) fake_segment = { 'created_at': NOW, 'updated_at': None, 'deleted_at': None, 'deleted': False, 'id': 123, 'uuid': uuidsentinel.fake_segment, 'name': 'foo-segment', 'service_type': 'COMPUTE', 'description': 'fake-description', 'recovery_method': 'auto', 'enabled': True } class TestFailoverSegmentObject(test_objects._LocalTest): @mock.patch('masakari.db.failover_segment_get_by_name') def test_get_by_name(self, mock_api_get): mock_api_get.return_value = fake_segment segment_obj = segment.FailoverSegment.get_by_name(self.context, 'foo-segment') self.compare_obj(segment_obj, fake_segment) mock_api_get.assert_called_once_with(self.context, 'foo-segment') @mock.patch('masakari.db.failover_segment_get_by_uuid') def test_get_by_uuid(self, mock_api_get): 
mock_api_get.return_value = fake_segment segment_obj = (segment.FailoverSegment. get_by_uuid(self.context, uuidsentinel.fake_segment)) self.compare_obj(segment_obj, fake_segment) mock_api_get.assert_called_once_with(self.context, uuidsentinel.fake_segment) @mock.patch('masakari.db.failover_segment_get_by_id') def test_get_by_id(self, mock_api_get): mock_api_get.return_value = fake_segment fake_id = 123 segment_obj = segment.FailoverSegment.get_by_id(self.context, fake_id) self.compare_obj(segment_obj, fake_segment) mock_api_get.assert_called_once_with(self.context, fake_id) def _segment_create_attribute(self): segment_obj = segment.FailoverSegment(context=self.context) segment_obj.name = 'foo-segment' segment_obj.description = 'keydata' segment_obj.service_type = 'fake-user' segment_obj.recovery_method = 'auto' segment_obj.uuid = uuidsentinel.fake_segment return segment_obj @mock.patch.object(api_utils, 'notify_about_segment_api') @mock.patch('masakari.db.failover_segment_create') def test_create(self, mock_segment_create, mock_notify_about_segment_api): mock_segment_create.return_value = fake_segment segment_obj = self._segment_create_attribute() segment_obj.create() self.compare_obj(segment_obj, fake_segment) mock_segment_create.assert_called_once_with(self.context, { 'uuid': uuidsentinel.fake_segment, 'name': 'foo-segment', 'description': 'keydata', 'service_type': 'fake-user', 'recovery_method': 'auto'}) action = fields.EventNotificationAction.SEGMENT_CREATE phase_start = fields.EventNotificationPhase.START phase_end = fields.EventNotificationPhase.END notify_calls = [ mock.call(self.context, segment_obj, action=action, phase=phase_start), mock.call(self.context, segment_obj, action=action, phase=phase_end)] mock_notify_about_segment_api.assert_has_calls(notify_calls) @mock.patch.object(api_utils, 'notify_about_segment_api') @mock.patch('masakari.db.failover_segment_create') def test_recreate_fails(self, mock_segment_create, mock_notify_about_segment_api): 
mock_segment_create.return_value = fake_segment segment_obj = self._segment_create_attribute() segment_obj.create() self.assertRaises(exception.ObjectActionError, segment_obj.create) mock_segment_create.assert_called_once_with(self.context, { 'uuid': uuidsentinel.fake_segment, 'name': 'foo-segment', 'description': 'keydata', 'service_type': 'fake-user', 'recovery_method': 'auto'}) action = fields.EventNotificationAction.SEGMENT_CREATE phase_start = fields.EventNotificationPhase.START phase_end = fields.EventNotificationPhase.END notify_calls = [ mock.call(self.context, segment_obj, action=action, phase=phase_start), mock.call(self.context, segment_obj, action=action, phase=phase_end)] mock_notify_about_segment_api.assert_has_calls(notify_calls) @mock.patch.object(api_utils, 'notify_about_segment_api') @mock.patch('masakari.db.failover_segment_delete') def test_destroy(self, mock_segment_destroy, mock_notify_about_segment_api): segment_obj = self._segment_create_attribute() segment_obj.id = 123 segment_obj.destroy() mock_segment_destroy.assert_called_once_with( self.context, uuidsentinel.fake_segment) action = fields.EventNotificationAction.SEGMENT_DELETE phase_start = fields.EventNotificationPhase.START phase_end = fields.EventNotificationPhase.END notify_calls = [ mock.call(self.context, segment_obj, action=action, phase=phase_start), mock.call(self.context, segment_obj, action=action, phase=phase_end)] mock_notify_about_segment_api.assert_has_calls(notify_calls) @mock.patch.object(api_utils, 'notify_about_segment_api') @mock.patch('masakari.db.failover_segment_delete') def test_destroy_failover_segment_found(self, mock_segment_destroy, mock_notify_about_segment_api): mock_segment_destroy.side_effect = exception.FailoverSegmentNotFound( id=123) segment_obj = self._segment_create_attribute() segment_obj.id = 123 self.assertRaises(exception.FailoverSegmentNotFound, segment_obj.destroy) action = fields.EventNotificationAction.SEGMENT_DELETE phase_start = 
fields.EventNotificationPhase.START notify_calls = [ mock.call(self.context, segment_obj, action=action, phase=phase_start)] mock_notify_about_segment_api.assert_has_calls(notify_calls) @mock.patch('masakari.db.failover_segment_get_all_by_filters') def test_get_segment_by_recovery_method(self, mock_api_get): fake_segment2 = copy.deepcopy(fake_segment) fake_segment2['name'] = 'fake_segment2' mock_api_get.return_value = [fake_segment2, fake_segment] segment_result = (segment.FailoverSegmentList. get_all(self.context, filters={'recovery_method': 'auto'})) self.assertEqual(2, len(segment_result)) self.compare_obj(segment_result[0], fake_segment2) self.compare_obj(segment_result[1], fake_segment) mock_api_get.assert_called_once_with(self.context, filters={ 'recovery_method': 'auto' }, limit=None, marker=None, sort_dirs=None, sort_keys=None) @mock.patch('masakari.db.failover_segment_get_all_by_filters') def test_get_segment_by_service_type(self, mock_api_get): fake_segment2 = copy.deepcopy(fake_segment) fake_segment2['name'] = 'fake_segment' mock_api_get.return_value = [fake_segment2, fake_segment] segment_result = (segment.FailoverSegmentList. 
get_all(self.context, filters={'service_type': 'COMPUTE'})) self.assertEqual(2, len(segment_result)) self.compare_obj(segment_result[0], fake_segment2) self.compare_obj(segment_result[1], fake_segment) mock_api_get.assert_called_once_with(self.context, filters={ 'service_type': 'COMPUTE' }, limit=None, marker=None, sort_dirs=None, sort_keys=None) @mock.patch('masakari.db.failover_segment_get_all_by_filters') def test_get_limit_and_marker_invalid_marker(self, mock_api_get): segment_name = 'unknown_segment' mock_api_get.side_effect = exception.MarkerNotFound(marker=segment_name ) self.assertRaises(exception.MarkerNotFound, segment.FailoverSegmentList.get_all, self.context, limit=5, marker=segment_name) @mock.patch.object(api_utils, 'notify_about_segment_api') @mock.patch('masakari.db.failover_segment_update') def test_save(self, mock_segment_update, mock_notify_about_segment_api): mock_segment_update.return_value = fake_segment segment_object = segment.FailoverSegment(context=self.context) segment_object.name = "foo-segment" segment_object.id = 123 segment_object.uuid = uuidsentinel.fake_segment segment_object.save() self.compare_obj(segment_object, fake_segment) self.assertTrue(mock_segment_update.called) action = fields.EventNotificationAction.SEGMENT_UPDATE phase_start = fields.EventNotificationPhase.START phase_end = fields.EventNotificationPhase.END notify_calls = [ mock.call(self.context, segment_object, action=action, phase=phase_start), mock.call(self.context, segment_object, action=action, phase=phase_end)] mock_notify_about_segment_api.assert_has_calls(notify_calls) @mock.patch.object(api_utils, 'notify_about_segment_api') @mock.patch('masakari.db.failover_segment_update') def test_save_failover_segment_not_found(self, mock_segment_update, mock_notify_about_segment_api): mock_segment_update.side_effect = ( exception.FailoverSegmentNotFound(id=uuidsentinel.fake_segment)) segment_object = segment.FailoverSegment(context=self.context) segment_object.name = 
"foo-segment" segment_object.id = 123 segment_object.uuid = uuidsentinel.fake_segment self.assertRaises(exception.FailoverSegmentNotFound, segment_object.save) action = fields.EventNotificationAction.SEGMENT_UPDATE phase_start = fields.EventNotificationPhase.START notify_calls = [ mock.call(self.context, segment_object, action=action, phase=phase_start)] mock_notify_about_segment_api.assert_has_calls(notify_calls) @mock.patch.object(api_utils, 'notify_about_segment_api') @mock.patch('masakari.db.failover_segment_update') def test_save_failover_segment_already_exists(self, mock_segment_update, mock_notify_about_segment_api): mock_segment_update.side_effect = ( exception.FailoverSegmentExists(name="foo-segment")) segment_object = segment.FailoverSegment(context=self.context) segment_object.name = "foo-segment" segment_object.id = 123 segment_object.uuid = uuidsentinel.fake_segment self.assertRaises(exception.FailoverSegmentExists, segment_object.save) action = fields.EventNotificationAction.SEGMENT_UPDATE phase_start = fields.EventNotificationPhase.START notify_calls = [ mock.call(self.context, segment_object, action=action, phase=phase_start)] mock_notify_about_segment_api.assert_has_calls(notify_calls) def test_obj_make_compatible(self): segment_obj = segment.FailoverSegment(context=self.context) segment_obj.name = "foo-segment" segment_obj.id = 123 segment_obj.uuid = uuidsentinel.fake_segment segment_obj.enabled = True primitive = segment_obj.obj_to_primitive('1.1') self.assertIn('enabled', primitive['masakari_object.data']) primitive = segment_obj.obj_to_primitive('1.0') self.assertNotIn('enabled', primitive['masakari_object.data']) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/tests/unit/objects/test_vmoves.py0000664000175100017510000001004015033036143023713 0ustar00mylesmyles# Copyright(c) 2022 Inspur # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not 
use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from unittest import mock from oslo_utils import timeutils from masakari import exception from masakari.objects import vmove from masakari.tests.unit.objects import test_objects from masakari.tests import uuidsentinel NOW = timeutils.utcnow().replace(microsecond=0) fake_vmove = { 'created_at': NOW, 'updated_at': None, 'deleted_at': None, 'deleted': False, 'id': 1, 'uuid': uuidsentinel.fake_vmove, 'notification_uuid': uuidsentinel.fake_notification, 'instance_uuid': uuidsentinel.fake_instance, 'instance_name': 'fake_vm', 'source_host': 'fake_host1', 'dest_host': None, 'start_time': None, 'end_time': None, 'status': 'pending', 'type': 'evacuation', 'message': None } class TestVMoveObject(test_objects._LocalTest): @mock.patch('masakari.db.vmove_get_by_uuid') def test_get_by_uuid(self, mock_api_get): mock_api_get.return_value = fake_vmove vmove_obj = vmove.VMove.get_by_uuid( self.context, uuidsentinel.fake_vmove) self.compare_obj(vmove_obj, fake_vmove) mock_api_get.assert_called_once_with(self.context, uuidsentinel.fake_vmove) def _vmove_create_attributes(self): vmove_obj = vmove.VMove(context=self.context) vmove_obj.uuid = uuidsentinel.fake_vmove vmove_obj.notification_uuid = uuidsentinel.fake_notification vmove_obj.instance_uuid = uuidsentinel.fake_instance vmove_obj.instance_name = 'fake_vm1' vmove_obj.source_host = 'fake_host1' vmove_obj.status = 'pending' vmove_obj.type = 'evacuation' return vmove_obj @mock.patch('masakari.db.vmove_create') def test_create(self, mock_vmove_create): mock_vmove_create.return_value 
= fake_vmove vmove_obj = self._vmove_create_attributes() vmove_obj.create() self.compare_obj(vmove_obj, fake_vmove) mock_vmove_create.assert_called_once_with(self.context, { 'uuid': uuidsentinel.fake_vmove, 'notification_uuid': uuidsentinel.fake_notification, 'instance_uuid': uuidsentinel.fake_instance, 'instance_name': 'fake_vm1', 'source_host': 'fake_host1', 'status': 'pending', 'type': 'evacuation' }) @mock.patch('masakari.db.vmoves_get_all_by_filters') def test_get_limit_and_marker_invalid_marker(self, mock_api_get): vmove_uuid = uuidsentinel.fake_vmove mock_api_get.side_effect = (exception. MarkerNotFound(marker=vmove_uuid)) self.assertRaises(exception.MarkerNotFound, vmove.VMoveList.get_all, self.context, limit=5, marker=vmove_uuid) @mock.patch('masakari.db.vmove_update') def test_save(self, mock_vmove_update): mock_vmove_update.return_value = fake_vmove vmove_obj = self._vmove_create_attributes() vmove_obj.uuid = uuidsentinel.fake_vmove vmove_obj.save() self.compare_obj(vmove_obj, fake_vmove) (mock_vmove_update.assert_called_once_with( self.context, uuidsentinel.fake_vmove, {'uuid': uuidsentinel.fake_vmove, 'notification_uuid': uuidsentinel.fake_notification, 'instance_uuid': uuidsentinel.fake_instance, 'instance_name': 'fake_vm1', 'source_host': 'fake_host1', 'status': 'pending', 'type': 'evacuation'})) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/tests/unit/policy_fixture.py0000664000175100017510000000711115033036143022756 0ustar00mylesmyles# Copyright 2016 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import os import yaml import fixtures from oslo_policy import policy as oslo_policy import masakari.conf from masakari.conf import paths from masakari import policies import masakari.policy from masakari.tests.unit import fake_policy CONF = masakari.conf.CONF class RealPolicyFixture(fixtures.Fixture): """Load the live policy for tests. A base policy fixture that starts with the assumption that you'd like to load and enforce the shipped default policy in tests. Provides interfaces to tinker with both the contents and location of the policy file before loading to allow overrides. To do this implement ``_prepare_policy`` in the subclass, and adjust the ``policy_file`` accordingly. """ def _prepare_policy(self): """Allow changing of the policy before we get started""" pass def setUp(self): super(RealPolicyFixture, self).setUp() # policy_file can be overridden by subclasses self.policy_file = paths.state_path_def('etc/masakari/policy.yaml') self._prepare_policy() CONF.set_override('policy_file', self.policy_file, group='oslo_policy') masakari.policy.reset() masakari.policy.init(suppress_deprecation_warnings=True) self.addCleanup(masakari.policy.reset) def set_rules(self, rules, overwrite=True): policy = masakari.policy._ENFORCER policy.set_rules(oslo_policy.Rules.from_dict(rules), overwrite=overwrite) class PolicyFixture(RealPolicyFixture): """Load a fake policy from masakari.tests.unit.fake_policy This overrides the policy with a completely fake and synthetic policy file. 
""" def _prepare_policy(self): self.policy_dir = self.useFixture(fixtures.TempDir()) self.policy_file = os.path.join(self.policy_dir.path, 'policy.yaml') # load the fake_policy data and add the missing default rules. policy_rules = yaml.safe_load(fake_policy.policy_data) with open(self.policy_file, 'w') as f: yaml.dump(policy_rules, f) CONF.set_override('policy_dirs', [], group='oslo_policy') class RoleBasedPolicyFixture(RealPolicyFixture): """Load a modified policy which allows all actions only be a single roll. This fixture can be used for testing role based permissions as it provides a version of the policy which stomps over all previous declaration and makes every action only available to a single role. """ def __init__(self, role="admin", *args, **kwargs): super(RoleBasedPolicyFixture, self).__init__(*args, **kwargs) self.role = role def _prepare_policy(self): # Convert all actions to require the specified role policy = {} for rule in policies.list_rules(): policy[rule.name] = 'role:%s' % self.role self.policy_dir = self.useFixture(fixtures.TempDir()) self.policy_file = os.path.join(self.policy_dir.path, 'policy.yaml') with open(self.policy_file, 'w') as f: yaml.dump(policy, f) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/tests/unit/test_api_validation.py0000664000175100017510000006040115033036143023734 0ustar00mylesmyles# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations # under the License. from http import HTTPStatus import re import fixtures from jsonschema import exceptions as jsonschema_exc from masakari.api import api_version_request as api_version from masakari.api import validation from masakari.api.validation import parameter_types from masakari.api.validation import validators from masakari import exception from masakari.tests.unit import base class FakeRequest(object): def __init__(self, version=None): if version is None: version = '1.0' self.api_version_request = api_version.APIVersionRequest(version) class ValidationRegex(base.NoDBTestCase): def test_build_regex_range(self): def _get_all_chars(): for i in range(0x7F): yield chr(i) self.useFixture(fixtures.MonkeyPatch( 'masakari.api.validation.parameter_types._get_all_chars', _get_all_chars)) r = parameter_types._build_regex_range(ws=False) self.assertEqual(r, re.escape('!') + '-' + re.escape('~')) # if we allow whitespace the range starts earlier r = parameter_types._build_regex_range(ws=True) self.assertEqual(r, re.escape(' ') + '-' + re.escape('~')) # excluding a character will give us 2 ranges r = parameter_types._build_regex_range(ws=True, exclude=['A']) self.assertEqual(r, re.escape(' ') + '-' + re.escape('@') + 'B' + '-' + re.escape('~')) # inverting which gives us all the initial unprintable characters. r = parameter_types._build_regex_range(ws=False, invert=True) self.assertEqual(r, re.escape('\x00') + '-' + re.escape(' ')) # excluding characters that create a singleton. Naively this would be: # ' -@B-BD-~' which seems to work, but ' -@BD-~' is more natural. r = parameter_types._build_regex_range(ws=True, exclude=['A', 'C']) self.assertEqual(r, re.escape(' ') + '-' + re.escape('@') + 'B' + 'D' + '-' + re.escape('~')) # ws=True means the positive regex has printable whitespaces, # so the inverse will not. The inverse will include things we # exclude. 
r = parameter_types._build_regex_range( ws=True, exclude=['A', 'B', 'C', 'Z'], invert=True) self.assertEqual(r, re.escape('\x00') + '-' + re.escape('\x1f') + 'A-CZ') class APIValidationTestCase(base.NoDBTestCase): def setUp(self, schema=None): super(APIValidationTestCase, self).setUp() self.post = None if schema is not None: @validation.schema(request_body_schema=schema) def post(req, body): return 'Validation succeeded.' self.post = post def check_validation_error(self, method, body, expected_detail, req=None): if not req: req = FakeRequest() try: method(body=body, req=req,) except exception.ValidationError as ex: self.assertEqual(HTTPStatus.BAD_REQUEST, ex.kwargs['code']) if isinstance(expected_detail, list): self.assertIn(ex.kwargs['detail'], expected_detail, 'Exception details did not match expected') elif not re.match(expected_detail, ex.kwargs['detail']): self.assertEqual(expected_detail, ex.kwargs['detail'], 'Exception details did not match expected') except Exception as ex: self.fail('An unexpected exception happens: %s' % ex) else: self.fail('Any exception does not happen.') class FormatCheckerTestCase(base.NoDBTestCase): def test_format_checker_failed(self): format_checker = validators.FormatChecker() exc = self.assertRaises(jsonschema_exc.FormatError, format_checker.check, " ", "name") self.assertIsInstance(exc.cause, exception.InvalidName) self.assertEqual("An invalid 'name' value was provided. The name must " "be: printable characters. " "Can not start or end with whitespace.", exc.cause.format_message()) def test_format_checker_failed_with_non_string(self): checks = ["name"] format_checker = validators.FormatChecker() for check in checks: exc = self.assertRaises(jsonschema_exc.FormatError, format_checker.check, None, "name") self.assertIsInstance(exc.cause, exception.InvalidName) self.assertEqual("An invalid 'name' value was provided. The name " "must be: printable characters. 
" "Can not start or end with whitespace.", exc.cause.format_message()) class RequiredDisableTestCase(APIValidationTestCase): def setUp(self): schema = { 'type': 'object', 'properties': { 'foo': { 'type': 'integer', }, }, } super(RequiredDisableTestCase, self).setUp(schema=schema) def test_validate_required_disable(self): self.assertEqual(self.post(body={'foo': 1}, req=FakeRequest()), 'Validation succeeded.') self.assertEqual(self.post(body={'abc': 1}, req=FakeRequest()), 'Validation succeeded.') class RequiredEnableTestCase(APIValidationTestCase): def setUp(self): schema = { 'type': 'object', 'properties': { 'foo': { 'type': 'integer', }, }, 'required': ['foo'] } super(RequiredEnableTestCase, self).setUp(schema=schema) def test_validate_required_enable(self): self.assertEqual(self.post(body={'foo': 1}, req=FakeRequest()), 'Validation succeeded.') def test_validate_required_enable_fails(self): detail = "'foo' is a required property" self.check_validation_error(self.post, body={'abc': 1}, expected_detail=detail) class AdditionalPropertiesEnableTestCase(APIValidationTestCase): def setUp(self): schema = { 'type': 'object', 'properties': { 'foo': { 'type': 'integer', }, }, 'required': ['foo'], } super(AdditionalPropertiesEnableTestCase, self).setUp(schema=schema) def test_validate_additionalProperties_enable(self): self.assertEqual(self.post(body={'foo': 1}, req=FakeRequest()), 'Validation succeeded.') self.assertEqual(self.post(body={'foo': 1, 'ext': 1}, req=FakeRequest()), 'Validation succeeded.') class AdditionalPropertiesDisableTestCase(APIValidationTestCase): def setUp(self): schema = { 'type': 'object', 'properties': { 'foo': { 'type': 'integer', }, }, 'required': ['foo'], 'additionalProperties': False, } super(AdditionalPropertiesDisableTestCase, self).setUp(schema=schema) def test_validate_additionalProperties_disable(self): self.assertEqual(self.post(body={'foo': 1}, req=FakeRequest()), 'Validation succeeded.') def 
test_validate_additionalProperties_disable_fails(self): detail = "Additional properties are not allowed ('ext' was unexpected)" self.check_validation_error(self.post, body={'foo': 1, 'ext': 1}, expected_detail=detail) class PatternPropertiesTestCase(APIValidationTestCase): def setUp(self): schema = { 'patternProperties': { '^[a-zA-Z0-9]{1,10}$': { 'type': 'string' }, }, 'additionalProperties': False, } super(PatternPropertiesTestCase, self).setUp(schema=schema) def test_validate_patternProperties(self): self.assertEqual('Validation succeeded.', self.post(body={'foo': 'bar'}, req=FakeRequest())) def test_validate_patternProperties_fails(self): details = [ "Additional properties are not allowed ('__' was unexpected)", "'__' does not match any of the regexes: '^[a-zA-Z0-9]{1,10}$'" ] self.check_validation_error(self.post, body={'__': 'bar'}, expected_detail=details) details = [ "'' does not match any of the regexes: '^[a-zA-Z0-9]{1,10}$'", "Additional properties are not allowed ('' was unexpected)" ] self.check_validation_error(self.post, body={'': 'bar'}, expected_detail=details) details = [ ("'0123456789a' does not match any of the regexes: " "'^[a-zA-Z0-9]{1,10}$'"), ("Additional properties are not allowed ('0123456789a' was" " unexpected)") ] self.check_validation_error(self.post, body={'0123456789a': 'bar'}, expected_detail=details) detail = "expected string or bytes-like object" self.check_validation_error(self.post, body={None: 'bar'}, expected_detail=detail) class StringTestCase(APIValidationTestCase): def setUp(self): schema = { 'type': 'object', 'properties': { 'foo': { 'type': 'string', }, }, } super(StringTestCase, self).setUp(schema=schema) def test_validate_string(self): self.assertEqual(self.post(body={'foo': 'abc'}, req=FakeRequest()), 'Validation succeeded.') self.assertEqual(self.post(body={'foo': '0'}, req=FakeRequest()), 'Validation succeeded.') self.assertEqual(self.post(body={'foo': ''}, req=FakeRequest()), 'Validation succeeded.') def 
test_validate_string_fails(self): detail = ("Invalid input for field/attribute foo. Value: 1." " 1 is not of type 'string'") self.check_validation_error(self.post, body={'foo': 1}, expected_detail=detail) detail = ("Invalid input for field/attribute foo. Value: 1.5." " 1.5 is not of type 'string'") self.check_validation_error(self.post, body={'foo': 1.5}, expected_detail=detail) detail = ("Invalid input for field/attribute foo. Value: True." " True is not of type 'string'") self.check_validation_error(self.post, body={'foo': True}, expected_detail=detail) class StringLengthTestCase(APIValidationTestCase): def setUp(self): schema = { 'type': 'object', 'properties': { 'foo': { 'type': 'string', 'minLength': 1, 'maxLength': 10, }, }, } super(StringLengthTestCase, self).setUp(schema=schema) def test_validate_string_length(self): self.assertEqual(self.post(body={'foo': '0'}, req=FakeRequest()), 'Validation succeeded.') self.assertEqual(self.post(body={'foo': '0123456789'}, req=FakeRequest()), 'Validation succeeded.') def test_validate_string_length_fails(self): detail = [ "Invalid input for field/attribute foo. Value: . '' " "is too short", # jsonschema < 4.23.0 "Invalid input for field/attribute foo. Value: . '' " "should be non-empty", # jsonschema >= 4.23.0 ] self.check_validation_error(self.post, body={'foo': ''}, expected_detail=detail) detail = ("Invalid input for field/attribute foo. Value: 0123456789a." 
" '0123456789a' is too long") self.check_validation_error(self.post, body={'foo': '0123456789a'}, expected_detail=detail) class IntegerTestCase(APIValidationTestCase): def setUp(self): schema = { 'type': 'object', 'properties': { 'foo': { 'type': ['integer', 'string'], 'pattern': '^[0-9]+$', }, }, } super(IntegerTestCase, self).setUp(schema=schema) def test_validate_integer(self): self.assertEqual(self.post(body={'foo': 1}, req=FakeRequest()), 'Validation succeeded.') self.assertEqual(self.post(body={'foo': '1'}, req=FakeRequest()), 'Validation succeeded.') self.assertEqual(self.post(body={'foo': '0123456789'}, req=FakeRequest()), 'Validation succeeded.') def test_validate_integer_fails(self): detail = ("Invalid input for field/attribute foo. Value: abc." " 'abc' does not match '^[0-9]+$'") self.check_validation_error(self.post, body={'foo': 'abc'}, expected_detail=detail) detail = ("Invalid input for field/attribute foo. Value: True." " True is not of type 'integer', 'string'") self.check_validation_error(self.post, body={'foo': True}, expected_detail=detail) detail = ("Invalid input for field/attribute foo. Value: 0xffff." " '0xffff' does not match '^[0-9]+$'") self.check_validation_error(self.post, body={'foo': '0xffff'}, expected_detail=detail) detail = ("Invalid input for field/attribute foo. Value: 1.0." " 1.0 is not of type 'integer', 'string'") self.check_validation_error(self.post, body={'foo': 1.0}, expected_detail=detail) detail = ("Invalid input for field/attribute foo. Value: 1.0." 
" '1.0' does not match '^[0-9]+$'") self.check_validation_error(self.post, body={'foo': '1.0'}, expected_detail=detail) class IntegerRangeTestCase(APIValidationTestCase): def setUp(self): schema = { 'type': 'object', 'properties': { 'foo': { 'type': ['integer', 'string'], 'pattern': '^[0-9]+$', 'minimum': 1, 'maximum': 10, }, }, } super(IntegerRangeTestCase, self).setUp(schema=schema) def test_validate_integer_range(self): self.assertEqual(self.post(body={'foo': 1}, req=FakeRequest()), 'Validation succeeded.') self.assertEqual(self.post(body={'foo': 10}, req=FakeRequest()), 'Validation succeeded.') self.assertEqual(self.post(body={'foo': '1'}, req=FakeRequest()), 'Validation succeeded.') def test_validate_integer_range_fails(self): detail = ("Invalid input for field/attribute foo. Value: 0." " 0(.0)? is less than the minimum of 1") self.check_validation_error(self.post, body={'foo': 0}, expected_detail=detail) detail = ("Invalid input for field/attribute foo. Value: 11." " 11(.0)? is greater than the maximum of 10") self.check_validation_error(self.post, body={'foo': 11}, expected_detail=detail) detail = ("Invalid input for field/attribute foo. Value: 0." " 0(.0)? is less than the minimum of 1") self.check_validation_error(self.post, body={'foo': '0'}, expected_detail=detail) detail = ("Invalid input for field/attribute foo. Value: 11." " 11(.0)? 
is greater than the maximum of 10") self.check_validation_error(self.post, body={'foo': '11'}, expected_detail=detail) class BooleanTestCase(APIValidationTestCase): def setUp(self): schema = { 'type': 'object', 'properties': { 'foo': parameter_types.boolean, }, } super(BooleanTestCase, self).setUp(schema=schema) def test_validate_boolean(self): self.assertEqual('Validation succeeded.', self.post(body={'foo': True}, req=FakeRequest())) self.assertEqual('Validation succeeded.', self.post(body={'foo': False}, req=FakeRequest())) self.assertEqual('Validation succeeded.', self.post(body={'foo': 'True'}, req=FakeRequest())) self.assertEqual('Validation succeeded.', self.post(body={'foo': 'False'}, req=FakeRequest())) self.assertEqual('Validation succeeded.', self.post(body={'foo': '1'}, req=FakeRequest())) self.assertEqual('Validation succeeded.', self.post(body={'foo': '0'}, req=FakeRequest())) def test_validate_boolean_fails(self): enum_boolean = ("[True, 'True', 'TRUE', 'true', '1', 'ON', 'On'," " 'on', 'YES', 'Yes', 'yes'," " False, 'False', 'FALSE', 'false', '0', 'OFF', 'Off'," " 'off', 'NO', 'No', 'no']") detail = ("Invalid input for field/attribute foo. Value: bar." " 'bar' is not one of %s") % enum_boolean self.check_validation_error(self.post, body={'foo': 'bar'}, expected_detail=detail) detail = ("Invalid input for field/attribute foo. Value: 2." 
" '2' is not one of %s") % enum_boolean self.check_validation_error(self.post, body={'foo': '2'}, expected_detail=detail) class NameTestCase(APIValidationTestCase): def setUp(self): schema = { 'type': 'object', 'properties': { 'foo': parameter_types.name, }, } super(NameTestCase, self).setUp(schema=schema) def test_validate_name(self): self.assertEqual('Validation succeeded.', self.post(body={'foo': 'm1.small'}, req=FakeRequest())) self.assertEqual('Validation succeeded.', self.post(body={'foo': 'my server'}, req=FakeRequest())) self.assertEqual('Validation succeeded.', self.post(body={'foo': 'a'}, req=FakeRequest())) self.assertEqual('Validation succeeded.', self.post(body={'foo': '\u0434'}, req=FakeRequest())) self.assertEqual('Validation succeeded.', self.post(body={'foo': '\u0434\u2006\ufffd'}, req=FakeRequest())) def test_validate_name_fails(self): error = ("An invalid 'name' value was provided. The name must be: " "printable characters. " "Can not start or end with whitespace.") should_fail = (' ', ' segment', 'segment ', 'a\xa0', # trailing unicode space '\uffff', # non-printable unicode ) for item in should_fail: self.check_validation_error(self.post, body={'foo': item}, expected_detail=error) # four-byte unicode, if supported by this python build try: self.check_validation_error(self.post, body={'foo': '\U00010000'}, expected_detail=error) except ValueError: pass class DatetimeTestCase(APIValidationTestCase): def setUp(self): schema = { 'type': 'object', 'properties': { 'foo': { 'type': 'string', 'format': 'date-time', }, }, } super(DatetimeTestCase, self).setUp(schema=schema) def test_validate_datetime(self): self.assertEqual('Validation succeeded.', self.post(body={ 'foo': '2016-01-14T01:00:00Z'}, req=FakeRequest() )) class VersionedApiValidationTestCase(APIValidationTestCase): def setUp(self): super(__class__, self).setUp() schema_pre13 = { 'type': 'object', 'properties': { 'foo': { 'type': 'string', }, }, 'additionalProperties': False, } schema_post13 
= { 'type': 'object', 'properties': { 'bar': { 'type': 'boolean', }, }, 'additionalProperties': False, } @validation.schema(request_body_schema=schema_pre13, min_version='1.1', max_version='1.2') @validation.schema(request_body_schema=schema_post13, min_version='1.3') def post(req, body): return 'Validation succeeded.' self.post = post def check_validation_error(self, body, req): try: self.post(body=body, req=req) except exception.ValidationError as ex: self.assertEqual(HTTPStatus.BAD_REQUEST, ex.kwargs['code']) except Exception as ex: self.fail('An unexpected exception happens: %s' % ex) else: self.fail('Any exception does not happen.') def test_validate_with_proper_microversions(self): self.assertEqual('Validation succeeded.', self.post(body={ 'foo': 'ahappystring'}, req=FakeRequest('1.1') )) self.assertEqual('Validation succeeded.', self.post(body={ 'foo': 'ahappystring'}, req=FakeRequest('1.2') )) self.assertEqual('Validation succeeded.', self.post(body={ 'bar': True}, req=FakeRequest('1.3') )) self.assertEqual('Validation succeeded.', self.post(body={ 'bar': True}, req=FakeRequest('1.10') )) self.assertEqual('Validation succeeded.', self.post(body={ 'whatever': None}, req=FakeRequest('1.0') )) def test_validate_with_improper_microversions(self): self.check_validation_error(body={'bar': False}, req=FakeRequest('1.1')) self.check_validation_error(body={'bar': False}, req=FakeRequest('1.2')) self.check_validation_error(body={'foo': 'asadstring'}, req=FakeRequest('1.3')) self.check_validation_error(body={'foo': 'asadstring'}, req=FakeRequest('1.10')) self.check_validation_error(body={'foo': 'asadstring'}, req=FakeRequest('2.0')) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/tests/unit/test_conf.py0000664000175100017510000000560715033036143021705 0ustar00mylesmyles# Copyright 2016 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file 
except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import os import tempfile import fixtures from oslo_config import cfg import masakari.conf.api from masakari.tests.unit import base class ConfTest(base.NoDBTestCase): """This is a test and pattern for parsing tricky options.""" class TestConfigOpts(cfg.ConfigOpts): def __call__(self, args=None, default_config_files=None): if default_config_files is None: default_config_files = [] return cfg.ConfigOpts.__call__( self, args=args, prog='test', version='1.0', usage='%(prog)s FOO BAR', default_config_files=default_config_files, validate_default_values=True) def setUp(self): super(ConfTest, self).setUp() self.useFixture(fixtures.NestedTempfile()) self.conf = self.TestConfigOpts() self.tempdirs = [] def create_tempfiles(self, files, ext='.conf'): tempfiles = [] for (basename, contents) in files: if not os.path.isabs(basename): (fd, path) = tempfile.mkstemp(prefix=basename, suffix=ext) else: path = basename + ext fd = os.open(path, os.O_CREAT | os.O_WRONLY) tempfiles.append(path) try: os.write(fd, contents.encode('utf-8')) finally: os.close(fd) return tempfiles def test_reserved_huge_page(self): masakari.conf.api.register_opts(self.conf) paths = self.create_tempfiles( [('1', '[DEFAULT]\n' 'osapi_max_limit = 1000\n')]) self.conf(['--config-file', paths[0]]) # NOTE(Dinesh_Bhor): In oslo.config if you specify a parameter # incorrectly, it silently drops it from the conf. Which means # the attr doesn't exist at all. The first attr test here is # for an unrelated boolean option that is using defaults (so # will always work. 
It's a basic control that *anything* is working. self.assertTrue(hasattr(self.conf, 'osapi_max_limit')) self.assertTrue(hasattr(self.conf, 'use_forwarded_for')) # NOTE(Dinesh_Bhor): Yes, this actually parses as an array holding # a dict. actual = 1000 self.assertEqual(actual, self.conf.osapi_max_limit) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/tests/unit/test_context.py0000664000175100017510000001453115033036143022440 0ustar00mylesmyles# Copyright 2016 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from oslo_context import context as o_context from oslo_context import fixture as o_fixture from masakari import context from masakari.tests.unit import base class ContextTestCase(base.NoDBTestCase): def setUp(self): super(ContextTestCase, self).setUp() self.useFixture(o_fixture.ClearRequestContext()) def test_request_context_elevated(self): user_ctxt = context.RequestContext('111', '222', is_admin=False) self.assertFalse(user_ctxt.is_admin) admin_ctxt = user_ctxt.elevated() self.assertTrue(admin_ctxt.is_admin) self.assertIn('admin', admin_ctxt.roles) self.assertFalse(user_ctxt.is_admin) self.assertNotIn('admin', user_ctxt.roles) def test_request_context_sets_is_admin(self): ctxt = context.RequestContext('111', '222', roles=['admin', 'weasel']) self.assertTrue(ctxt.is_admin) def test_request_context_sets_is_admin_upcase(self): ctxt = context.RequestContext('111', '222', roles=['Admin', 'weasel']) self.assertTrue(ctxt.is_admin) def test_request_context_read_deleted(self): ctxt = context.RequestContext('111', '222', read_deleted='yes') self.assertEqual('yes', ctxt.read_deleted) ctxt.read_deleted = 'no' self.assertEqual('no', ctxt.read_deleted) def test_request_context_read_deleted_none(self): ctxt = context.RequestContext('111', '222') self.assertEqual('no', ctxt.read_deleted) def test_request_context_read_deleted_invalid(self): self.assertRaises(ValueError, context.RequestContext, '111', '222', read_deleted=True) ctxt = context.RequestContext('111', '222') self.assertRaises(ValueError, setattr, ctxt, 'read_deleted', True) def test_service_catalog_default(self): ctxt = context.RequestContext('111', '222') self.assertEqual([], ctxt.service_catalog) ctxt = context.RequestContext('111', '222', service_catalog=[]) self.assertEqual([], ctxt.service_catalog) ctxt = context.RequestContext('111', '222', service_catalog=None) self.assertEqual([], ctxt.service_catalog) def test_store_when_no_overwrite(self): # If no context exists we store one even if overwrite is false # 
(since we are not overwriting anything). ctx = context.RequestContext('111', '222', overwrite=False) self.assertIs(o_context.get_current(), ctx) def test_no_overwrite(self): # If there is already a context in the cache a new one will # not overwrite it if overwrite=False. ctx1 = context.RequestContext('111', '222', overwrite=True) context.RequestContext('333', '444', overwrite=False) self.assertIs(o_context.get_current(), ctx1) def test_admin_no_overwrite(self): # If there is already a context in the cache creating an admin # context will not overwrite it. ctx1 = context.RequestContext('111', '222', overwrite=True) context.get_admin_context() self.assertIs(o_context.get_current(), ctx1) def test_convert_from_rc_to_dict(self): ctx = context.RequestContext( 111, 222, request_id='req-679033b7-1755-4929-bf85-eb3bfaef7e0b', timestamp='2016-03-02T22:31:56.641629') values2 = ctx.to_dict() expected_values = {'is_admin': False, 'project_id': 222, 'project_name': None, 'read_deleted': 'no', 'remote_address': None, 'request_id': 'req-679033b7-1755-4929-bf85-eb3bfaef7e0b', 'service_catalog': [], 'timestamp': '2016-03-02T22:31:56.641629', 'user_id': 111, 'user_name': None} self.assertEqual(values2, {**values2, **expected_values}) def test_convert_from_dict_then_to_dict(self): # TODO(tkajiam): Remove tenant once oslo.context is bumped to >= 4.0 values = {'is_admin': True, 'tenant': '222', 'project_id': '222', 'project_name': 'projname', 'read_deleted': 'yes', 'remote_address': '192.0.2.1', 'request_id': 'req-679033b7-1755-4929-bf85-eb3bfaef7e0b', 'service_catalog': [], 'timestamp': '2016-03-02T22:31:56.641629', 'user': '111', 'user_id': '111', 'user_name': 'username'} ctx = context.RequestContext.from_dict(values) self.assertEqual('111', ctx.user_id) self.assertEqual('222', ctx.project_id) values2 = ctx.to_dict() # TODO(tkajiam): Remove this once oslo.context is bumped to >= 4.0 values2.setdefault('tenant', values2.get('project_id')) self.assertEqual(values2, {**values2, 
**values}) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/tests/unit/test_exception.py0000664000175100017510000001350615033036143022753 0ustar00mylesmyles# Copyright 2016 NTT DATA # Administrator of the National Aeronautics and Space Administration. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from http import HTTPStatus import inspect from webob.util import status_reasons from masakari import exception from masakari.tests.unit import base class MasakariExceptionTestCase(base.NoDBTestCase): def test_default_error_msg(self): class FakeMasakariException(exception.MasakariException): msg_fmt = "default message" exc = FakeMasakariException() self.assertEqual('default message', str(exc)) def test_error_msg(self): self.assertEqual('test', str(exception.MasakariException('test'))) def test_default_error_msg_with_kwargs(self): class FakeMasakariException(exception.MasakariException): msg_fmt = "default message: %(code)s" exc = FakeMasakariException(code=int(HTTPStatus.INTERNAL_SERVER_ERROR)) self.assertEqual('default message: 500', str(exc)) self.assertEqual('default message: 500', exc.message) def test_error_msg_exception_with_kwargs(self): class FakeMasakariException(exception.MasakariException): msg_fmt = "default message: %(misspelled_code)s" exc = FakeMasakariException(code=int(HTTPStatus.INTERNAL_SERVER_ERROR), misspelled_code='blah') self.assertEqual('default message: blah', 
str(exc)) self.assertEqual('default message: blah', exc.message) def test_default_error_code(self): class FakeMasakariException(exception.MasakariException): code = HTTPStatus.NOT_FOUND exc = FakeMasakariException() self.assertEqual(HTTPStatus.NOT_FOUND, exc.kwargs['code']) def test_error_code_from_kwarg(self): class FakeMasakariException(exception.MasakariException): code = HTTPStatus.INTERNAL_SERVER_ERROR exc = FakeMasakariException(code=HTTPStatus.NOT_FOUND) self.assertEqual(exc.kwargs['code'], HTTPStatus.NOT_FOUND) def test_format_message_local(self): class FakeMasakariException(exception.MasakariException): msg_fmt = "some message" exc = FakeMasakariException() self.assertEqual(str(exc), exc.format_message()) def test_format_message_remote(self): class FakeMasakariException_Remote(exception.MasakariException): msg_fmt = "some message" def __str__(self): return "print the whole trace" exc = FakeMasakariException_Remote() self.assertEqual(u"print the whole trace", str(exc)) self.assertEqual("some message", exc.format_message()) def test_format_message_remote_error(self): class FakeMasakariException_Remote(exception.MasakariException): msg_fmt = "some message %(somearg)s" def __unicode__(self): return u"print the whole trace" self.flags(fatal_exception_format_errors=False) exc = FakeMasakariException_Remote(lame_arg='lame') self.assertEqual("some message %(somearg)s", exc.format_message()) class ConvertedExceptionTestCase(base.NoDBTestCase): def test_instantiate(self): exc = exception.ConvertedException(int(HTTPStatus.BAD_REQUEST), 'Bad Request', 'reason') self.assertEqual(exc.code, HTTPStatus.BAD_REQUEST) self.assertEqual(exc.title, 'Bad Request') self.assertEqual(exc.explanation, 'reason') def test_instantiate_without_title_known_code(self): exc = exception.ConvertedException( int(HTTPStatus.INTERNAL_SERVER_ERROR)) self.assertEqual(exc.title, status_reasons[HTTPStatus.INTERNAL_SERVER_ERROR]) def test_instantiate_without_title_unknown_code(self): exc = 
exception.ConvertedException(499) self.assertEqual(exc.title, 'Unknown Client Error') def test_instantiate_bad_code(self): self.assertRaises(KeyError, exception.ConvertedException, 10) class ExceptionTestCase(base.NoDBTestCase): @staticmethod def _raise_exc(exc): raise exc(int(HTTPStatus.INTERNAL_SERVER_ERROR)) def test_exceptions_raise(self): # NOTE(Dinesh_Bhor): disable format errors since we are not passing # kwargs self.flags(fatal_exception_format_errors=False) for name in dir(exception): exc = getattr(exception, name) # NOTE(yoctozepto): we skip HTTPStatus as it is not an exception # but a type also present in that module. if isinstance(exc, type) and name != 'HTTPStatus': self.assertRaises(exc, self._raise_exc, exc) class ExceptionValidMessageTestCase(base.NoDBTestCase): def test_messages(self): failures = [] for name, obj in inspect.getmembers(exception): if name in ['MasakariException', 'InstanceFaultRollback']: continue if not inspect.isclass(obj): continue if not issubclass(obj, exception.MasakariException): continue e = obj if e.msg_fmt == "An unknown exception occurred.": failures.append('%s needs a more specific msg_fmt' % name) if failures: self.fail('\n'.join(failures)) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/tests/unit/test_hacking.py0000664000175100017510000005101315033036143022354 0ustar00mylesmyles# Copyright 2016 NTT Data. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import textwrap from unittest import mock import ddt import pycodestyle from masakari.hacking import checks from masakari.tests.unit import base @ddt.ddt class HackingTestCase(base.NoDBTestCase): """This class tests the hacking checks in masakari.hacking.checks by passing strings to the check methods like the pycodestyle/flake8 parser would. The parser loops over each line in the file and then passes the parameters to the check method. The parameter names in the check method dictate what type of object is passed to the check method. The parameter types are:: logical_line: A processed line with the following modifications: - Multi-line statements converted to a single line. - Stripped left and right. - Contents of strings replaced with "xxx" of same length. - Comments removed. physical_line: Raw line of text from the input file. lines: a list of the raw lines from the input file tokens: the tokens that contribute to this logical line line_number: line number in the input file total_lines: number of lines in the input file blank_lines: blank lines before this one indent_char: indentation character in this file (" " or "\t") indent_level: indentation (with tabs expanded to multiples of 8) previous_indent_level: indentation on previous line previous_logical: previous logical line filename: Path of the file being run through pycodestyle When running a test on a check method the return will be False/None if there is no violation in the sample input. If there is an error a tuple is returned with a position in the line, and a message. So to check the result just assertTrue if the check is expected to fail and assertFalse if it should pass. 
""" def test_assert_true_instance(self): self.assertEqual(len(list(checks.assert_true_instance( "self.assertTrue(isinstance(e, " "exception.BuildAbortException))"))), 1) self.assertEqual( len(list(checks.assert_true_instance("self.assertTrue()"))), 0) def test_assert_equal_type(self): self.assertEqual(len(list(checks.assert_equal_type( "self.assertEqual(type(als['QuicAssist']), list)"))), 1) self.assertEqual( len(list(checks.assert_equal_type("self.assertTrue()"))), 0) def test_assert_equal_in(self): self.assertEqual(len(list(checks.assert_equal_in( "self.assertEqual(a in b, True)"))), 1) self.assertEqual(len(list(checks.assert_equal_in( "self.assertEqual('str' in 'string', True)"))), 1) self.assertEqual(len(list(checks.assert_equal_in( "self.assertEqual(any(a==1 for a in b), True)"))), 0) self.assertEqual(len(list(checks.assert_equal_in( "self.assertEqual(True, a in b)"))), 1) self.assertEqual(len(list(checks.assert_equal_in( "self.assertEqual(True, 'str' in 'string')"))), 1) self.assertEqual(len(list(checks.assert_equal_in( "self.assertEqual(True, any(a==1 for a in b))"))), 0) self.assertEqual(len(list(checks.assert_equal_in( "self.assertEqual(a in b, False)"))), 1) self.assertEqual(len(list(checks.assert_equal_in( "self.assertEqual('str' in 'string', False)"))), 1) self.assertEqual(len(list(checks.assert_equal_in( "self.assertEqual(any(a==1 for a in b), False)"))), 0) self.assertEqual(len(list(checks.assert_equal_in( "self.assertEqual(False, a in b)"))), 1) self.assertEqual(len(list(checks.assert_equal_in( "self.assertEqual(False, 'str' in 'string')"))), 1) self.assertEqual(len(list(checks.assert_equal_in( "self.assertEqual(False, any(a==1 for a in b))"))), 0) def test_assert_true_or_false_with_in_or_not_in(self): self.assertEqual(len(list(checks.assert_true_or_false_with_in( "self.assertTrue(A in B)"))), 1) self.assertEqual(len(list(checks.assert_true_or_false_with_in( "self.assertFalse(A in B)"))), 1) 
self.assertEqual(len(list(checks.assert_true_or_false_with_in( "self.assertTrue(A not in B)"))), 1) self.assertEqual(len(list(checks.assert_true_or_false_with_in( "self.assertFalse(A not in B)"))), 1) self.assertEqual(len(list(checks.assert_true_or_false_with_in( "self.assertTrue(A in B, 'some message')"))), 1) self.assertEqual(len(list(checks.assert_true_or_false_with_in( "self.assertFalse(A in B, 'some message')"))), 1) self.assertEqual(len(list(checks.assert_true_or_false_with_in( "self.assertTrue(A not in B, 'some message')"))), 1) self.assertEqual(len(list(checks.assert_true_or_false_with_in( "self.assertFalse(A not in B, 'some message')"))), 1) self.assertEqual(len(list(checks.assert_true_or_false_with_in( "self.assertTrue(A in 'some string with spaces')"))), 1) self.assertEqual(len(list(checks.assert_true_or_false_with_in( "self.assertTrue(A in 'some string with spaces')"))), 1) self.assertEqual(len(list(checks.assert_true_or_false_with_in( "self.assertTrue(A in ['1', '2', '3'])"))), 1) self.assertEqual(len(list(checks.assert_true_or_false_with_in( "self.assertTrue(A in [1, 2, 3])"))), 1) self.assertEqual(len(list(checks.assert_true_or_false_with_in( "self.assertTrue(any(A > 5 for A in B))"))), 0) self.assertEqual(len(list(checks.assert_true_or_false_with_in( "self.assertTrue(any(A > 5 for A in B), 'some message')"))), 0) self.assertEqual(len(list(checks.assert_true_or_false_with_in( "self.assertFalse(some in list1 and some2 in list2)"))), 0) def test_no_setting_conf_directly_in_tests(self): self.assertEqual(len(list(checks.no_setting_conf_directly_in_tests( "CONF.option = 1", "masakari/tests/test_foo.py"))), 1) self.assertEqual(len(list(checks.no_setting_conf_directly_in_tests( "CONF.group.option = 1", "masakari/tests/test_foo.py"))), 1) self.assertEqual(len(list(checks.no_setting_conf_directly_in_tests( "CONF.option = foo = 1", "masakari/tests/test_foo.py"))), 1) # Shouldn't fail with comparisons 
self.assertEqual(len(list(checks.no_setting_conf_directly_in_tests( "CONF.option == 'foo'", "masakari/tests/test_foo.py"))), 0) self.assertEqual(len(list(checks.no_setting_conf_directly_in_tests( "CONF.option != 1", "masakari/tests/test_foo.py"))), 0) # Shouldn't fail since not in masakari/tests/ self.assertEqual(len(list(checks.no_setting_conf_directly_in_tests( "CONF.option = 1", "masakari/compute/foo.py"))), 0) def test_no_mutable_default_args(self): self.assertEqual(1, len(list(checks.no_mutable_default_args( "def get_info_from_bdm(virt_type, bdm, mapping=[])")))) self.assertEqual(0, len(list(checks.no_mutable_default_args( "defined = []")))) self.assertEqual(0, len(list(checks.no_mutable_default_args( "defined, undefined = [], {}")))) def test_check_explicit_underscore_import(self): self.assertEqual(len(list(checks.check_explicit_underscore_import( "LOG.info(_('My info message'))", "masakari/tests/other_files.py"))), 1) self.assertEqual(len(list(checks.check_explicit_underscore_import( "msg = _('My message')", "masakari/tests/other_files.py"))), 1) self.assertEqual(len(list(checks.check_explicit_underscore_import( "from masakari.i18n import _", "masakari/tests/other_files.py"))), 0) self.assertEqual(len(list(checks.check_explicit_underscore_import( "LOG.info(_('My info message'))", "masakari/tests/other_files.py"))), 0) self.assertEqual(len(list(checks.check_explicit_underscore_import( "msg = _('My message')", "masakari/tests/other_files.py"))), 0) self.assertEqual(len(list(checks.check_explicit_underscore_import( "from masakari.i18n import _", "masakari/tests/other_files2.py"))), 0) self.assertEqual(len(list(checks.check_explicit_underscore_import( "msg = _('My message')", "masakari/tests/other_files2.py"))), 0) self.assertEqual(len(list(checks.check_explicit_underscore_import( "_ = translations.ugettext", "masakari/tests/other_files3.py"))), 0) self.assertEqual(len(list(checks.check_explicit_underscore_import( "msg = _('My message')", 
"masakari/tests/other_files3.py"))), 0) # We are patching pycodestyle so that only the check under test is actually # installed. @mock.patch('pycodestyle._checks', {'physical_line': {}, 'logical_line': {}, 'tree': {}}) def _run_check(self, code, checker, filename=None): pycodestyle.register_check(checker) lines = textwrap.dedent(code).lstrip().splitlines(True) checker = pycodestyle.Checker(filename=filename, lines=lines) checker.check_all() checker.report._deferred_print.sort() return checker.report._deferred_print def _assert_has_errors(self, code, checker, expected_errors=None, filename=None): actual_errors = [e[:3] for e in self._run_check(code, checker, filename)] self.assertEqual(expected_errors or [], actual_errors) def _assert_has_no_errors(self, code, checker, filename=None): self._assert_has_errors(code, checker, filename=filename) def test_oslo_assert_raises_regexp(self): code = """ self.assertRaisesRegexp(ValueError, "invalid literal for.*XYZ'$", int, 'XYZ') """ self._assert_has_errors(code, checks.assert_raises_regexp, expected_errors=[(1, 0, "M319")]) def test_dict_constructor_with_list_copy(self): self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy( " dict([(i, connect_info[i])")))) self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy( " attrs = dict([(k, _from_json(v))")))) self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy( " type_names = dict((value, key) for key, value in")))) self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy( " dict((value, key) for key, value in")))) self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy( "foo(param=dict((k, v) for k, v in bar.items()))")))) self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy( " dict([[i,i] for i in range(3)])")))) self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy( " dd = dict([i,i] for i in range(3))")))) self.assertEqual(0, len(list(checks.dict_constructor_with_list_copy( " 
create_kwargs = dict(snapshot=snapshot,")))) self.assertEqual(0, len(list(checks.dict_constructor_with_list_copy( " self._render_dict(xml, data_el, data.__dict__)")))) def test_check_contextlib_use(self): code = """ with base.nested( mock.patch.object(network_model.NetworkInfo, 'hydrate'), mock.patch.object(objects.InstanceInfoCache, 'save'), ) as ( hydrate_mock, save_mock ) """ filename = "masakari/api/openstack/ha/test.py" self._assert_has_no_errors(code, checks.check_no_contextlib_nested, filename=filename) code = """ with contextlib.nested( mock.patch.object(network_model.NetworkInfo, 'hydrate'), mock.patch.object(objects.InstanceInfoCache, 'save'), ) as ( hydrate_mock, save_mock ) """ filename = "masakari/api/openstack/compute/ha/test.py" errors = [(1, 0, 'M323')] self._assert_has_errors(code, checks.check_no_contextlib_nested, expected_errors=errors, filename=filename) def test_check_greenthread_spawns(self): errors = [(1, 0, "M322")] code = "greenthread.spawn(func, arg1, kwarg1=kwarg1)" self._assert_has_errors(code, checks.check_greenthread_spawns, expected_errors=errors) code = "greenthread.spawn_n(func, arg1, kwarg1=kwarg1)" self._assert_has_errors(code, checks.check_greenthread_spawns, expected_errors=errors) code = "eventlet.greenthread.spawn(func, arg1, kwarg1=kwarg1)" self._assert_has_errors(code, checks.check_greenthread_spawns, expected_errors=errors) code = "eventlet.spawn(func, arg1, kwarg1=kwarg1)" self._assert_has_errors(code, checks.check_greenthread_spawns, expected_errors=errors) code = "eventlet.spawn_n(func, arg1, kwarg1=kwarg1)" self._assert_has_errors(code, checks.check_greenthread_spawns, expected_errors=errors) code = "masakari.utils.spawn(func, arg1, kwarg1=kwarg1)" self._assert_has_no_errors(code, checks.check_greenthread_spawns) code = "masakari.utils.spawn_n(func, arg1, kwarg1=kwarg1)" self._assert_has_no_errors(code, checks.check_greenthread_spawns) def test_config_option_regex_match(self): def should_match(code): 
self.assertTrue(checks.cfg_opt_re.match(code)) def should_not_match(code): self.assertFalse(checks.cfg_opt_re.match(code)) should_match("opt = cfg.StrOpt('opt_name')") should_match("opt = cfg.IntOpt('opt_name')") should_match("opt = cfg.DictOpt('opt_name')") should_match("opt = cfg.Opt('opt_name')") should_match("opts=[cfg.Opt('opt_name')]") should_match(" cfg.Opt('opt_name')") should_not_match("opt_group = cfg.OptGroup('opt_group_name')") def test_check_config_option_in_central_place(self): errors = [(1, 0, "M324")] code = """ opts = [ cfg.StrOpt('random_opt', default='foo', help='I am here to do stuff'), ] """ # option at the right place in the tree self._assert_has_no_errors(code, checks.check_config_option_in_central_place, filename="masakari/conf/serial_console.py") self._assert_has_errors(code, checks.check_config_option_in_central_place, filename="masakari/cmd/serialproxy.py", expected_errors=errors) def test_check_doubled_words(self): errors = [(1, 0, "M325")] # Explicit addition of line-ending here and below since this isn't a # block comment and without it we trigger #1804062. 
Artificial break is # necessary to stop flake8 detecting the test code = "'This is the" + " the best comment'\n" self._assert_has_errors(code, checks.check_doubled_words, expected_errors=errors) code = "'This is the then best comment'\n" self._assert_has_no_errors(code, checks.check_doubled_words) def test_dict_iteritems(self): self.assertEqual(1, len(list(checks.check_python3_no_iteritems( "obj.iteritems()")))) self.assertEqual(0, len(list(checks.check_python3_no_iteritems( "ob.items()")))) def test_dict_iterkeys(self): self.assertEqual(1, len(list(checks.check_python3_no_iterkeys( "for key in obj.iterkeys()")))) self.assertEqual(0, len(list(checks.check_python3_no_iterkeys( "for key in ob")))) def test_dict_itervalues(self): self.assertEqual(1, len(list(checks.check_python3_no_itervalues( "obj.itervalues()")))) self.assertEqual(0, len(list(checks.check_python3_no_itervalues( "ob.values()")))) def test_no_os_popen(self): code = """ import os foobar_cmd = "foobar -get -beer" answer = os.popen(foobar_cmd).read() if answer == "ok": try: os.popen(os.popen('foobar -beer -please')).read() except ValueError: go_home() """ errors = [(4, 0, 'M329'), (8, 8, 'M329')] self._assert_has_errors(code, checks.no_os_popen, expected_errors=errors) def test_no_log_warn(self): code = """ LOG.warn("LOG.warn is deprecated") """ errors = [(1, 0, 'M331')] self._assert_has_errors(code, checks.no_log_warn, expected_errors=errors) code = """ LOG.warning("LOG.warn is deprecated") """ self._assert_has_no_errors(code, checks.no_log_warn) @ddt.data('LOG.info(_LI("Bad"))', 'LOG.warning(_LW("Bad"))', 'LOG.error(_LE("Bad"))', 'LOG.exception(_("Bad"))', 'LOG.debug(_("Bad"))', 'LOG.critical(_LC("Bad"))') def test_no_translate_logs(self, log_statement): self.assertEqual(1, len(list(checks.no_translate_logs(log_statement)))) errors = [(1, 0, 'M308')] self._assert_has_errors(log_statement, checks.no_translate_logs, expected_errors=errors) def test_yield_followed_by_space(self): code = """ yield(x, y) 
yield{"type": "test"} yield[a, b, c] yield"test" yield'test' """ errors = [(x + 1, 0, 'M332') for x in range(5)] self._assert_has_errors(code, checks.yield_followed_by_space, expected_errors=errors) code = """ yield x yield (x, y) yield {"type": "test"} yield [a, b, c] yield "test" yield 'test' yieldx_func(a, b) """ self._assert_has_no_errors(code, checks.yield_followed_by_space) def test_check_policy_registration_in_central_place(self): errors = [(3, 0, "M333")] code = """ from masakari import policy policy.RuleDefault('context_is_admin', 'role:admin') """ # registration in the proper place self._assert_has_no_errors( code, checks.check_policy_registration_in_central_place, filename="masakari/policies/base.py") # option at a location which is not in scope right now self._assert_has_errors( code, checks.check_policy_registration_in_central_place, filename="masakari/api/openstack/ha/non_existent.py", expected_errors=errors) def test_check_policy_enforce(self): errors = [(3, 0, "M334")] code = """ from masakari import policy policy._ENFORCER.enforce('context_is_admin', target, credentials) """ self._assert_has_errors(code, checks.check_policy_enforce, expected_errors=errors) def test_check_policy_enforce_does_not_catch_other_enforce(self): # Simulate a different enforce method defined in masakari code = """ from masakari import foo foo.enforce() """ self._assert_has_no_errors(code, checks.check_policy_enforce) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/tests/unit/test_masakari_manage.py0000664000175100017510000000614015033036143024051 0ustar00mylesmyles# Copyright 2017 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import sys from unittest import mock from masakari.cmd import manage from masakari import context from masakari.db import api as db_api from masakari.tests.unit import base class DBCommandsTestCase(base.TestCase): def setUp(self): super(DBCommandsTestCase, self).setUp() self.commands = manage.DbCommands() self.context = context.get_admin_context() sys.argv = ['masakari-manage'] @mock.patch.object(db_api, 'purge_deleted_rows') @mock.patch.object(context, 'get_admin_context') def test_purge_command(self, mock_context, mock_db_purge): mock_context.return_value = self.context self.commands.purge(0, 100) mock_db_purge.assert_called_once_with(self.context, 0, 100) def test_purge_negative_age_in_days(self): ex = self.assertRaises(SystemExit, self.commands.purge, -1, 100) self.assertEqual("Must supply a non-negative value for age.", ex.code) def test_purge_invalid_age_in_days(self): ex = self.assertRaises(SystemExit, self.commands.purge, "test", 100) self.assertEqual("Invalid value for age, test", ex.code) def test_purge_command_exceeded_age_in_days(self): ex = self.assertRaises(SystemExit, self.commands.purge, 1000000, 50) self.assertEqual("Maximal age is count of days since epoch.", ex.code) def test_purge_invalid_max_rows(self): ex = self.assertRaises(SystemExit, self.commands.purge, 0, 0) self.assertEqual("Must supply value greater than 0 for max_rows.", ex.code) def test_purge_negative_max_rows(self): ex = self.assertRaises(SystemExit, self.commands.purge, 0, -5) self.assertEqual("Invalid input received: max_rows must be >= -1", ex.code) @mock.patch.object(db_api, 
'purge_deleted_rows') @mock.patch.object(context, 'get_admin_context') def test_purge_max_rows(self, mock_context, mock_db_purge): mock_context.return_value = self.context value = (2 ** 31) - 1 self.commands.purge(age_in_days=1, max_rows=value) mock_db_purge.assert_called_once_with(self.context, 1, value) def test_purge_command_exceeded_maximum_rows(self): # value(2 ** 31) is greater than max_rows(2147483647) by 1. value = 2 ** 31 ex = self.assertRaises(SystemExit, self.commands.purge, age_in_days=1, max_rows=value) expected = "Invalid input received: max_rows must be <= 2147483647" self.assertEqual(expected, ex.code) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/tests/unit/test_policy.py0000664000175100017510000002435015033036143022253 0ustar00mylesmyles# Copyright 2016 NTT DATA # All Rights Reserved. # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
"""Test of Policy Engine For Masakari.""" import os.path from oslo_policy import policy as oslo_policy from oslo_serialization import jsonutils import requests_mock import masakari.conf from masakari import context from masakari import exception from masakari import policy from masakari.tests.unit import base from masakari.tests.unit import fake_policy from masakari.tests.unit import policy_fixture from masakari import utils CONF = masakari.conf.CONF class PolicyFileTestCase(base.NoDBTestCase): def setUp(self): super(PolicyFileTestCase, self).setUp() self.context = context.RequestContext('fake', 'fake') self.target = {} def test_modified_policy_reloads(self): with utils.tempdir() as tmpdir: tmpfilename = os.path.join(tmpdir, 'policy') self.flags(policy_file=tmpfilename, group='oslo_policy') # NOTE(Dinesh_Bhor): context construction invokes policy check to # determine is_admin or not. As a side-effect, policy reset is # needed here to flush existing policy cache. policy.reset() policy.init(suppress_deprecation_warnings=True) rule = oslo_policy.RuleDefault('example:test', "") policy._ENFORCER.register_defaults([rule]) action = "example:test" with open(tmpfilename, "w") as policyfile: policyfile.write('{"example:test": ""}') policy.authorize(self.context, action, self.target) with open(tmpfilename, "w") as policyfile: policyfile.write('{"example:test": "!"}') policy._ENFORCER.load_rules(True) self.assertRaises(exception.PolicyNotAuthorized, policy.authorize, self.context, action, self.target) class PolicyTestCase(base.NoDBTestCase): def setUp(self): super(PolicyTestCase, self).setUp() rules = [ oslo_policy.RuleDefault("true", '@'), oslo_policy.RuleDefault("example:allowed", '@'), oslo_policy.RuleDefault("example:denied", "!"), oslo_policy.RuleDefault("example:get_http", "http://www.example.com"), oslo_policy.RuleDefault("example:my_file", "role:compute_admin or " "project_id:%(project_id)s"), oslo_policy.RuleDefault("example:early_and_fail", "! 
and @"), oslo_policy.RuleDefault("example:early_or_success", "@ or !"), oslo_policy.RuleDefault("example:lowercase_admin", "role:admin or role:sysadmin"), oslo_policy.RuleDefault("example:uppercase_admin", "role:ADMIN or role:sysadmin"), ] policy.reset() policy.init(suppress_deprecation_warnings=True) # before a policy rule can be used, its default has to be registered. policy._ENFORCER.register_defaults(rules) self.context = context.RequestContext('fake', 'fake', roles=['member']) self.target = {} def test_authorize_nonexistent_action_throws(self): action = "example:noexist" self.assertRaises(oslo_policy.PolicyNotRegistered, policy.authorize, self.context, action, self.target) def test_authorize_bad_action_throws(self): action = "example:denied" self.assertRaises(exception.PolicyNotAuthorized, policy.authorize, self.context, action, self.target) def test_authorize_bad_action_noraise(self): action = "example:denied" result = policy.authorize(self.context, action, self.target, False) self.assertFalse(result) def test_authorize_good_action(self): action = "example:allowed" result = policy.authorize(self.context, action, self.target) self.assertTrue(result) @requests_mock.mock() def test_authorize_http_true(self, req_mock): req_mock.post('http://www.example.com/', text='True') action = "example:get_http" target = {} result = policy.authorize(self.context, action, target) self.assertTrue(result) @requests_mock.mock() def test_authorize_http_false(self, req_mock): req_mock.post('http://www.example.com/', text='False') action = "example:get_http" target = {} self.assertRaises(exception.PolicyNotAuthorized, policy.authorize, self.context, action, target) def test_templatized_authorization(self): target_mine = {'project_id': 'fake'} target_not_mine = {'project_id': 'another'} action = "example:my_file" policy.authorize(self.context, action, target_mine) self.assertRaises(exception.PolicyNotAuthorized, policy.authorize, self.context, action, target_not_mine) def 
test_early_AND_authorization(self): action = "example:early_and_fail" self.assertRaises(exception.PolicyNotAuthorized, policy.authorize, self.context, action, self.target) def test_early_OR_authorization(self): action = "example:early_or_success" policy.authorize(self.context, action, self.target) def test_ignore_case_role_check(self): lowercase_action = "example:lowercase_admin" uppercase_action = "example:uppercase_admin" admin_context = context.RequestContext('admin', 'fake', roles=['AdMiN']) policy.authorize(admin_context, lowercase_action, self.target) policy.authorize(admin_context, uppercase_action, self.target) class IsAdminCheckTestCase(base.NoDBTestCase): def setUp(self): super(IsAdminCheckTestCase, self).setUp() policy.init(suppress_deprecation_warnings=True) def test_init_true(self): check = policy.IsAdminCheck('is_admin', 'True') self.assertEqual(check.kind, 'is_admin') self.assertEqual(check.match, 'True') self.assertTrue(check.expected) def test_init_false(self): check = policy.IsAdminCheck('is_admin', 'nottrue') self.assertEqual(check.kind, 'is_admin') self.assertEqual(check.match, 'False') self.assertFalse(check.expected) def test_call_true(self): check = policy.IsAdminCheck('is_admin', 'True') self.assertEqual(check('target', dict(is_admin=True), policy._ENFORCER), True) self.assertEqual(check('target', dict(is_admin=False), policy._ENFORCER), False) def test_call_false(self): check = policy.IsAdminCheck('is_admin', 'False') self.assertEqual(check('target', dict(is_admin=True), policy._ENFORCER), False) self.assertEqual(check('target', dict(is_admin=False), policy._ENFORCER), True) class AdminRolePolicyTestCase(base.NoDBTestCase): def setUp(self): super(AdminRolePolicyTestCase, self).setUp() self.policy = self.useFixture(policy_fixture.RoleBasedPolicyFixture()) self.context = context.RequestContext('fake', 'fake', roles=['member']) self.actions = policy.get_rules().keys() self.target = {} def 
test_authorize_admin_actions_with_nonadmin_context_throws(self): """Check if non-admin context passed to admin actions throws Policy not authorized exception """ for action in self.actions: self.assertRaises(exception.PolicyNotAuthorized, policy.authorize, self.context, action, self.target) class RealRolePolicyTestCase(base.NoDBTestCase): def setUp(self): super(RealRolePolicyTestCase, self).setUp() self.policy = self.useFixture(policy_fixture.RealPolicyFixture()) self.admin_context = context.RequestContext('fake', 'fake', True, roles=['member']) self.non_admin_context = context.RequestContext('fake', 'fake', roles=['member']) self.target = {} self.fake_policy = jsonutils.loads(fake_policy.policy_data) self.admin_only_rules = ( "os_masakari_api:extensions:index", "os_masakari_api:extensions:detail", "os_masakari_api:os-hosts:index", "os_masakari_api:os-hosts:detail", "os_masakari_api:os-hosts:create", "os_masakari_api:os-hosts:update", "os_masakari_api:os-hosts:delete", "os_masakari_api:segments:index", "os_masakari_api:segments:detail", "os_masakari_api:segments:create", "os_masakari_api:segments:update", "os_masakari_api:segments:delete", "os_masakari_api:notifications:index", "os_masakari_api:notifications:detail", "os_masakari_api:notifications:create" ) def test_all_rules_in_sample_file(self): special_rules = ["context_is_admin", "admin_or_owner", "default"] for (name, rule) in self.fake_policy.items(): if name in special_rules: continue self.assertIn(name, policy.get_rules()) def test_admin_only_rules(self): for rule in self.admin_only_rules: self.assertRaises(exception.PolicyNotAuthorized, policy.authorize, self.non_admin_context, rule, {'project_id': 'fake', 'user_id': 'fake'}) policy.authorize(self.admin_context, rule, self.target) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/tests/unit/test_rpc.py0000664000175100017510000002512015033036143021534 0ustar00mylesmyles# Copyright 
2016 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import copy from unittest import mock import fixtures import oslo_messaging as messaging from oslo_messaging.rpc import dispatcher from oslo_serialization import jsonutils import testtools from masakari import context from masakari import rpc from masakari.tests.unit import base class FakeAPI(rpc.RPCAPI): RPC_API_VERSION = '1.0' TOPIC = 'engine' BINARY = 'masakari-engine' class RPCAPITestCase(base.TestCase): """Tests RPCAPI mixin aggregating stuff related to RPC compatibility.""" def setUp(self): super(RPCAPITestCase, self).setUp() @mock.patch('masakari.rpc.get_client') def test_init(self, get_client): def fake_get_client(target, version_cap=None, serializer=None): self.assertEqual(FakeAPI.TOPIC, target.topic) self.assertEqual(FakeAPI.RPC_API_VERSION, target.version) get_client.side_effect = fake_get_client FakeAPI() @mock.patch('masakari.rpc.get_client') def test_init_cached_caps(self, get_client): def fake_get_client(target, version_cap=None, serializer=None): self.assertEqual(FakeAPI.TOPIC, target.topic) self.assertEqual(FakeAPI.RPC_API_VERSION, target.version) get_client.side_effect = fake_get_client FakeAPI() @mock.patch.object(messaging, 'set_transport_defaults') def test_set_defaults(self, mock_set): control_exchange = mock.Mock() rpc.set_defaults(control_exchange) mock_set.assert_called_once_with(control_exchange) def test_add_extra_exmods(self): rpc.EXTRA_EXMODS = [] rpc.add_extra_exmods('foo', 'bar') 
self.assertEqual(['foo', 'bar'], rpc.EXTRA_EXMODS) def test_clear_extra_exmods(self): rpc.EXTRA_EXMODS = ['foo', 'bar'] rpc.clear_extra_exmods() self.assertEqual(0, len(rpc.EXTRA_EXMODS)) def test_get_allowed_exmods(self): rpc.ALLOWED_EXMODS = ['foo'] rpc.EXTRA_EXMODS = ['bar'] exmods = rpc.get_allowed_exmods() self.assertEqual(['foo', 'bar'], exmods) @mock.patch.object(rpc, 'RequestContextSerializer') @mock.patch.object(messaging, 'get_rpc_client') def test_get_client(self, mock_get, mock_ser): rpc.TRANSPORT = mock.Mock() tgt = mock.Mock() ser = mock.Mock() mock_get.return_value = 'client' mock_ser.return_value = ser client = rpc.get_client(tgt, version_cap='1.0', serializer='foo') mock_ser.assert_called_once_with('foo') mock_get.assert_called_once_with(rpc.TRANSPORT, tgt, version_cap='1.0', serializer=ser) self.assertEqual('client', client) @mock.patch.object(rpc, 'RequestContextSerializer') @mock.patch.object(messaging, 'get_rpc_server') def test_get_server(self, mock_get, mock_ser): rpc.TRANSPORT = mock.Mock() ser = mock.Mock() tgt = mock.Mock() ends = mock.Mock() mock_ser.return_value = ser mock_get.return_value = 'server' access_policy = dispatcher.DefaultRPCAccessPolicy server = rpc.get_server(tgt, ends, serializer='foo') mock_ser.assert_called_once_with('foo') mock_get.assert_called_once_with(rpc.TRANSPORT, tgt, ends, executor='eventlet', serializer=ser, access_policy=access_policy) self.assertEqual('server', server) class RPCResetFixture(fixtures.Fixture): def _setUp(self): self.trans = copy.copy(rpc.TRANSPORT) self.noti_trans = copy.copy(rpc.NOTIFICATION_TRANSPORT) self.noti = copy.copy(rpc.NOTIFIER) self.all_mods = copy.copy(rpc.ALLOWED_EXMODS) self.ext_mods = copy.copy(rpc.EXTRA_EXMODS) self.addCleanup(self._reset_everything) def _reset_everything(self): rpc.TRANSPORT = self.trans rpc.NOTIFICATION_TRANSPORT = self.noti_trans rpc.NOTIFIER = self.noti rpc.ALLOWED_EXMODS = self.all_mods rpc.EXTRA_EXMODS = self.ext_mods class TestRPC(testtools.TestCase): 
def setUp(self): super(TestRPC, self).setUp() self.useFixture(RPCResetFixture()) @mock.patch.object(rpc, 'get_allowed_exmods') @mock.patch.object(rpc, 'RequestContextSerializer') @mock.patch.object(messaging, 'get_notification_transport') @mock.patch.object(messaging, 'Notifier') def test_init_versioned(self, mock_notif, mock_noti_trans, mock_ser, mock_exmods): expected = [{'topics': ['versioned_notifications']}] self._test_init(mock_notif, mock_noti_trans, mock_ser, mock_exmods, 'versioned', expected) def test_cleanup_transport_null(self): rpc.TRANSPORT = None rpc.NOTIFICATION_TRANSPORT = mock.Mock() rpc.NOTIFIER = mock.Mock() self.assertRaises(AssertionError, rpc.cleanup) def test_cleanup_notification_transport_null(self): rpc.TRANSPORT = mock.Mock() rpc.NOTIFICATION_TRANSPORT = None rpc.NOTIFIER = mock.Mock() self.assertRaises(AssertionError, rpc.cleanup) def test_cleanup_notifier_null(self): rpc.TRANSPORT = mock.Mock() rpc.NOTIFICATION_TRANSPORT = mock.Mock() rpc.NOTIFIER = None self.assertRaises(AssertionError, rpc.cleanup) def test_cleanup(self): rpc.NOTIFIER = mock.Mock() rpc.NOTIFICATION_TRANSPORT = mock.Mock() rpc.TRANSPORT = mock.Mock() trans_cleanup = mock.Mock() not_trans_cleanup = mock.Mock() rpc.TRANSPORT.cleanup = trans_cleanup rpc.NOTIFICATION_TRANSPORT.cleanup = not_trans_cleanup rpc.cleanup() trans_cleanup.assert_called_once_with() not_trans_cleanup.assert_called_once_with() self.assertIsNone(rpc.TRANSPORT) self.assertIsNone(rpc.NOTIFICATION_TRANSPORT) self.assertIsNone(rpc.NOTIFIER) def test_get_versioned_notifier(self): rpc.NOTIFIER = mock.Mock() mock_prep = mock.Mock() mock_prep.return_value = 'notifier' rpc.NOTIFIER.prepare = mock_prep notifier = rpc.get_versioned_notifier('service.foo') mock_prep.assert_called_once_with(publisher_id='service.foo') self.assertEqual('notifier', notifier) def _test_init(self, mock_notif, mock_noti_trans, mock_ser, mock_exmods, notif_format, expected_driver_topic_kwargs, 
versioned_notification_topics=['versioned_notifications']): notifier = mock.Mock() notif_transport = mock.Mock() transport = mock.Mock() serializer = mock.Mock() conf = mock.Mock() conf.transport_url = None conf.notification_format = notif_format mock_exmods.return_value = ['foo'] conf.notifications.versioned_notifications_topics = ( versioned_notification_topics) mock_noti_trans.return_value = notif_transport mock_ser.return_value = serializer mock_notif.side_effect = [notifier] @mock.patch.object(rpc, 'CONF', new=conf) @mock.patch.object(rpc, 'create_transport') @mock.patch.object(rpc, 'get_transport_url') def _test(get_url, create_transport): create_transport.return_value = transport rpc.init(conf) create_transport.assert_called_once_with(get_url.return_value) _test() self.assertTrue(mock_exmods.called) self.assertIsNotNone(rpc.TRANSPORT) self.assertIsNotNone(rpc.NOTIFIER) self.assertEqual(notifier, rpc.NOTIFIER) expected_calls = [] for kwargs in expected_driver_topic_kwargs: expected_kwargs = {'serializer': serializer} expected_kwargs.update(kwargs) expected_calls.append(((notif_transport,), expected_kwargs)) self.assertEqual(expected_calls, mock_notif.call_args_list, "The calls to messaging.Notifier() did not create " "the versioned notifiers properly.") class TestJsonPayloadSerializer(base.NoDBTestCase): def test_serialize_entity(self): with mock.patch.object(jsonutils, 'to_primitive') as mock_prim: rpc.JsonPayloadSerializer.serialize_entity('context', 'entity') mock_prim.assert_called_once_with('entity', convert_instances=True) class TestRequestContextSerializer(base.NoDBTestCase): def setUp(self): super(TestRequestContextSerializer, self).setUp() self.mock_base = mock.Mock() self.ser = rpc.RequestContextSerializer(self.mock_base) self.ser_null = rpc.RequestContextSerializer(None) def test_serialize_entity(self): self.mock_base.serialize_entity.return_value = 'foo' ser_ent = self.ser.serialize_entity('context', 'entity') 
self.mock_base.serialize_entity.assert_called_once_with('context', 'entity') self.assertEqual('foo', ser_ent) def test_serialize_entity_null_base(self): ser_ent = self.ser_null.serialize_entity('context', 'entity') self.assertEqual('entity', ser_ent) def test_deserialize_entity(self): self.mock_base.deserialize_entity.return_value = 'foo' deser_ent = self.ser.deserialize_entity('context', 'entity') self.mock_base.deserialize_entity.assert_called_once_with('context', 'entity') self.assertEqual('foo', deser_ent) def test_deserialize_entity_null_base(self): deser_ent = self.ser_null.deserialize_entity('context', 'entity') self.assertEqual('entity', deser_ent) def test_serialize_context(self): context = mock.Mock() self.ser.serialize_context(context) context.to_dict.assert_called_once_with() @mock.patch.object(context, 'RequestContext') def test_deserialize_context(self, mock_req): self.ser.deserialize_context('context') mock_req.from_dict.assert_called_once_with('context') ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/tests/unit/test_safeutils.py0000664000175100017510000000477315033036143022762 0ustar00mylesmyles# Copyright 2016 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import functools from masakari import safe_utils from masakari.tests.unit import base class WrappedCodeTestCase(base.NoDBTestCase): """Test the get_wrapped_function utility method.""" def _wrapper(self, function): @functools.wraps(function) def decorated_function(self, *args, **kwargs): function(self, *args, **kwargs) return decorated_function def test_single_wrapped(self): @self._wrapper def wrapped(self, instance, red=None, blue=None): pass func = safe_utils.get_wrapped_function(wrapped) func_code = func.__code__ self.assertEqual(4, len(func_code.co_varnames)) self.assertIn('self', func_code.co_varnames) self.assertIn('instance', func_code.co_varnames) self.assertIn('red', func_code.co_varnames) self.assertIn('blue', func_code.co_varnames) def test_double_wrapped(self): @self._wrapper @self._wrapper def wrapped(self, instance, red=None, blue=None): pass func = safe_utils.get_wrapped_function(wrapped) func_code = func.__code__ self.assertEqual(4, len(func_code.co_varnames)) self.assertIn('self', func_code.co_varnames) self.assertIn('instance', func_code.co_varnames) self.assertIn('red', func_code.co_varnames) self.assertIn('blue', func_code.co_varnames) def test_triple_wrapped(self): @self._wrapper @self._wrapper @self._wrapper def wrapped(self, instance, red=None, blue=None): pass func = safe_utils.get_wrapped_function(wrapped) func_code = func.__code__ self.assertEqual(4, len(func_code.co_varnames)) self.assertIn('self', func_code.co_varnames) self.assertIn('instance', func_code.co_varnames) self.assertIn('red', func_code.co_varnames) self.assertIn('blue', func_code.co_varnames) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/tests/unit/test_service.py0000664000175100017510000001467015033036143022420 0ustar00mylesmyles# Copyright 2016 NTT DATA # All Rights Reserved. 
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Unit Tests for remote procedure calls using queue """ from unittest import mock from oslo_concurrency import processutils from oslo_config import cfg from oslo_service import service as _service from masakari import exception from masakari import manager from masakari import rpc from masakari import service from masakari.tests.unit import base CONF = cfg.CONF class FakeManager(manager.Manager): """Fake manager for tests.""" def test_method(self): return 'manager' class ServiceManagerTestCase(base.NoDBTestCase): """Test cases for Services.""" @mock.patch.object(rpc, 'init') def test_message_gets_to_manager(self, mock_rpc_init): serv = service.Service('test', 'test', 'test', 'masakari.tests.unit.test_service.FakeManager') self.assertEqual('manager', serv.test_method()) class ServiceTestCase(base.NoDBTestCase): """Test cases for Services.""" def setUp(self): super(ServiceTestCase, self).setUp() self.host = 'foo' self.binary = 'masakari-engine' self.topic = 'fake' @mock.patch.object(rpc, 'init') def test_create(self, mock_rpc_init): app = service.Service.create(host=self.host, binary=self.binary, topic=self.topic) self.assertTrue(app) @mock.patch.object(rpc, 'init') def test_repr(self, mock_rpc_init): # Test if a Service object is correctly represented, for example in # log files. 
serv = service.Service(self.host, self.binary, self.topic, 'masakari.tests.unit.test_service.FakeManager') exp = "" self.assertEqual(exp, repr(serv)) @mock.patch.object(_service.Service, 'stop') @mock.patch.object(rpc, 'init') @mock.patch.object(rpc, 'get_server') def test_parent_graceful_shutdown(self, mock_rpc, mock_rpc_init, mock_stop): serv = service.Service(self.host, self.binary, self.topic, 'masakari.tests.unit.test_service.FakeManager') serv.manager = mock.Mock() serv.manager.service_name = self.topic serv.start() serv.stop() serv.rpcserver.start.assert_called_once_with() serv.rpcserver.stop.assert_called_once_with() mock_stop.assert_called_once_with() @mock.patch.object(rpc, 'init') def test_reset(self, mock_rpc_init): serv = service.Service(self.host, self.binary, self.topic, 'masakari.tests.unit.test_service.FakeManager') with mock.patch.object(serv.manager, 'reset') as mock_reset: serv.reset() mock_reset.assert_called_once_with() class TestWSGIService(base.NoDBTestCase): def setUp(self): super(TestWSGIService, self).setUp() self.stub_out('masakari.api.wsgi.Loader.load_app', mock.MagicMock()) def test_workers_set_default(self): test_service = service.WSGIService("masakari_api") self.assertEqual(test_service.workers, processutils.get_worker_count()) def test_workers_set_good_user_setting(self): CONF.set_override('masakari_api_workers', 8) test_service = service.WSGIService("masakari_api") self.assertEqual(test_service.workers, 8) def test_workers_set_zero_user_setting(self): CONF.set_override('masakari_api_workers', 0) test_service = service.WSGIService("masakari_api") # If a value less than 1 is used, defaults to number of procs available self.assertEqual(test_service.workers, processutils.get_worker_count()) def test_service_start_with_illegal_workers(self): CONF.set_override("masakari_api_workers", -1) self.assertRaises(exception.InvalidInput, service.WSGIService, "masakari_api") def test_reset_pool_size_to_default(self): test_service = 
service.WSGIService("test_service") test_service.start() # Stopping the service, which in turn sets pool size to 0 test_service.stop() self.assertEqual(test_service.server._pool.size, 0) # Resetting pool size to default test_service.reset() test_service.start() self.assertEqual(test_service.server._pool.size, CONF.wsgi.default_pool_size) class TestLauncher(base.NoDBTestCase): @mock.patch.object(_service, 'launch') def test_launch_app(self, mock_launch): service._launcher = None service.serve(mock.sentinel.service) mock_launch.assert_called_once_with(mock.ANY, mock.sentinel.service, workers=None, restart_method='mutate') @mock.patch.object(_service, 'launch') def test_launch_app_with_workers(self, mock_launch): service._launcher = None service.serve(mock.sentinel.service, workers=mock.sentinel.workers) mock_launch.assert_called_once_with(mock.ANY, mock.sentinel.service, workers=mock.sentinel.workers, restart_method='mutate') @mock.patch.object(_service, 'launch') def test_launch_app_more_than_once_raises(self, mock_launch): service._launcher = None service.serve(mock.sentinel.service) self.assertRaises(RuntimeError, service.serve, mock.sentinel.service) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/tests/unit/test_utils.py0000664000175100017510000002173015033036143022113 0ustar00mylesmyles# Copyright 2016 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import importlib from unittest import mock import eventlet from oslo_config import cfg from oslo_context import context as common_context from oslo_context import fixture as context_fixture import masakari from masakari import context from masakari import exception from masakari.tests.unit import base from masakari import utils CONF = cfg.CONF class UTF8TestCase(base.NoDBTestCase): def test_none_value(self): self.assertIsInstance(utils.utf8(None), type(None)) def test_bytes_value(self): some_value = b"fake data" return_value = utils.utf8(some_value) # check that type of returned value doesn't changed self.assertIsInstance(return_value, type(some_value)) self.assertEqual(some_value, return_value) def test_not_text_type(self): return_value = utils.utf8(1) self.assertEqual(b"1", return_value) self.assertIsInstance(return_value, bytes) def test_text_type_with_encoding(self): some_value = 'test\u2026config' self.assertEqual(some_value, utils.utf8(some_value).decode("utf-8")) class MonkeyPatchTestCase(base.NoDBTestCase): """Unit test for utils.monkey_patch().""" def setUp(self): super(MonkeyPatchTestCase, self).setUp() self.example_package = 'masakari.tests.unit.monkey_patch_example.' 
self.flags( monkey_patch=True, monkey_patch_modules=[self.example_package + 'example_a' + ':' + self.example_package + 'example_decorator']) def test_monkey_patch(self): utils.monkey_patch() masakari.tests.unit.monkey_patch_example.CALLED_FUNCTION = [] from masakari.tests.unit.monkey_patch_example import example_a from masakari.tests.unit.monkey_patch_example import example_b self.assertEqual('Example function', example_a.example_function_a()) exampleA = example_a.ExampleClassA() exampleA.example_method() ret_a = exampleA.example_method_add(3, 5) self.assertEqual(ret_a, 8) self.assertEqual('Example function', example_b.example_function_b()) exampleB = example_b.ExampleClassB() exampleB.example_method() ret_b = exampleB.example_method_add(3, 5) self.assertEqual(ret_b, 8) package_a = self.example_package + 'example_a.' self.assertIn(package_a + 'example_function_a', masakari.tests.unit.monkey_patch_example.CALLED_FUNCTION) self.assertIn(package_a + 'ExampleClassA.example_method', masakari.tests.unit.monkey_patch_example.CALLED_FUNCTION) self.assertIn(package_a + 'ExampleClassA.example_method_add', masakari.tests.unit.monkey_patch_example.CALLED_FUNCTION) package_b = self.example_package + 'example_b.' 
self.assertNotIn(package_b + 'example_function_b', ( masakari.tests.unit.monkey_patch_example.CALLED_FUNCTION)) self.assertNotIn(package_b + 'ExampleClassB.example_method', ( masakari.tests.unit.monkey_patch_example.CALLED_FUNCTION)) self.assertNotIn(package_b + 'ExampleClassB.example_method_add', ( masakari.tests.unit.monkey_patch_example.CALLED_FUNCTION)) class MonkeyPatchDefaultTestCase(base.NoDBTestCase): """Unit test for default monkey_patch_modules value.""" def setUp(self): super(MonkeyPatchDefaultTestCase, self).setUp() self.flags( monkey_patch=True) def test_monkey_patch_default_mod(self): # monkey_patch_modules is defined to be # : # Here we check that both parts of the default values are # valid for module in CONF.monkey_patch_modules: m = module.split(':', 1) # Check we can import the module to be patched importlib.import_module(m[0]) # check the decorator is valid decorator_name = m[1].rsplit('.', 1) decorator_module = importlib.import_module(decorator_name[0]) getattr(decorator_module, decorator_name[1]) class ExpectedArgsTestCase(base.NoDBTestCase): def test_passes(self): @utils.expects_func_args('foo', 'baz') def dec(f): return f @dec def func(foo, bar, baz="lol"): pass # Call to ensure nothing errors func(None, None) def test_raises(self): @utils.expects_func_args('foo', 'baz') def dec(f): return f def func(bar, baz): pass self.assertRaises(TypeError, dec, func) def test_var_no_of_args(self): @utils.expects_func_args('foo') def dec(f): return f @dec def func(bar, *args, **kwargs): pass # Call to ensure nothing errors func(None) def test_more_layers(self): @utils.expects_func_args('foo', 'baz') def dec(f): return f def dec_2(f): def inner_f(*a, **k): return f() return inner_f @dec_2 def func(bar, baz): pass self.assertRaises(TypeError, dec, func) class SpawnNTestCase(base.NoDBTestCase): def setUp(self): super(SpawnNTestCase, self).setUp() self.useFixture(context_fixture.ClearRequestContext()) self.spawn_name = 'spawn_n' def 
test_spawn_n_no_context(self): self.assertIsNone(common_context.get_current()) def _fake_spawn(func, *args, **kwargs): # call the method to ensure no error is raised func(*args, **kwargs) self.assertEqual('test', args[0]) def fake(arg): pass with mock.patch.object(eventlet, self.spawn_name, _fake_spawn): getattr(utils, self.spawn_name)(fake, 'test') self.assertIsNone(common_context.get_current()) def test_spawn_n_context(self): self.assertIsNone(common_context.get_current()) ctxt = context.RequestContext('user', 'project') def _fake_spawn(func, *args, **kwargs): # call the method to ensure no error is raised func(*args, **kwargs) self.assertEqual(ctxt, args[0]) self.assertEqual('test', kwargs['kwarg1']) def fake(context, kwarg1=None): pass with mock.patch.object(eventlet, self.spawn_name, _fake_spawn): getattr(utils, self.spawn_name)(fake, ctxt, kwarg1='test') self.assertEqual(ctxt, common_context.get_current()) def test_spawn_n_context_different_from_passed(self): self.assertIsNone(common_context.get_current()) ctxt = context.RequestContext('user', 'project') ctxt_passed = context.RequestContext('user', 'project', overwrite=False) self.assertEqual(ctxt, common_context.get_current()) def _fake_spawn(func, *args, **kwargs): # call the method to ensure no error is raised func(*args, **kwargs) self.assertEqual(ctxt_passed, args[0]) self.assertEqual('test', kwargs['kwarg1']) def fake(context, kwarg1=None): pass with mock.patch.object(eventlet, self.spawn_name, _fake_spawn): getattr(utils, self.spawn_name)(fake, ctxt_passed, kwarg1='test') self.assertEqual(ctxt, common_context.get_current()) class SpawnTestCase(SpawnNTestCase): def setUp(self): super(SpawnTestCase, self).setUp() self.spawn_name = 'spawn' class ValidateIntegerTestCase(base.NoDBTestCase): def test_exception_converted(self): self.assertRaises(exception.InvalidInput, utils.validate_integer, "im-not-an-int", "not-an-int") self.assertRaises(exception.InvalidInput, utils.validate_integer, 3.14, "Pie") 
self.assertRaises(exception.InvalidInput, utils.validate_integer, "299", "Sparta no-show", min_value=300, max_value=300) self.assertRaises(exception.InvalidInput, utils.validate_integer, 55, "doing 55 in a 54", max_value=54) self.assertRaises(exception.InvalidInput, utils.validate_integer, chr(129), "UnicodeError", max_value=1000) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/tests/unit/test_versions.py0000664000175100017510000000416115033036143022622 0ustar00mylesmyles# Copyright 2016 NTT DATA # All Rights Reserved. # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import builtins import io from oslo_config import cfg from masakari.tests.unit import base from masakari import version class VersionTestCase(base.NoDBTestCase): """Test cases for Versions code.""" def test_version_string_with_package_is_good(self): """Ensure uninstalled code get version string.""" self.stub_out('masakari.version.version_info.version_string', lambda: '5.5.5.5') self.stub_out('masakari.version.MASAKARI_PACKAGE', 'g9ec3421') self.assertEqual("5.5.5.5-g9ec3421", version.version_string_with_package()) def test_release_file(self): version.loaded = False real_open = builtins.open real_find_file = cfg.CONF.find_file def fake_find_file(self, name): if name == "release": return "/etc/masakari/release" return real_find_file(self, name) def fake_open(path, *args, **kwargs): if path == "/etc/masakari/release": data = """[Masakari] vendor = ACME Corporation product = ACME Masakari package = 1337""" return io.StringIO(data) return real_open(path, *args, **kwargs) self.stub_out('builtins.open', fake_open) self.stub_out('oslo_config.cfg.ConfigOpts.find_file', fake_find_file) self.assertEqual(version.vendor_string(), "ACME Corporation") self.assertEqual(version.product_string(), "ACME Masakari") self.assertEqual(version.package_string(), "1337") ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/tests/unit/utils.py0000664000175100017510000000376615033036143021065 0ustar00mylesmyles# Copyright 2011 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations # under the License. import errno import platform import socket import sys import masakari.conf import masakari.context import masakari.utils CONF = masakari.conf.CONF def get_test_admin_context(): return masakari.context.get_admin_context() def is_linux(): return platform.system() == 'Linux' def coreutils_readlink_available(): _out, err = masakari.utils.trycmd('readlink', '-nm', '/') return err == '' def is_ipv6_supported(): has_ipv6_support = socket.has_ipv6 try: s = socket.socket(socket.AF_INET6, socket.SOCK_STREAM) s.close() except socket.error as e: if e.errno == errno.EAFNOSUPPORT: has_ipv6_support = False else: raise # check if there is at least one interface with ipv6 if has_ipv6_support and sys.platform.startswith('linux'): try: with open('/proc/net/if_inet6') as f: if not f.read(): has_ipv6_support = False except IOError: has_ipv6_support = False return has_ipv6_support def get_api_version(request): if request.path[2:3].isdigit(): return int(request.path[2:3]) def _compare_args(args1, args2, cmp): return all(cmp(*pair) for pair in zip(args1, args2)) def _compare_kwargs(kwargs1, kwargs2, cmp): return all(cmp(kwargs1[k], kwargs2[k]) for k in set(list(kwargs1.keys()) + list(kwargs2.keys()))) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/tests/uuidsentinel.py0000664000175100017510000000210315033036143021436 0ustar00mylesmyles# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import sys class UUIDSentinels(object): def __init__(self): from oslo_utils import uuidutils self._uuid_module = uuidutils self._sentinels = {} def __getattr__(self, name): if name.startswith('_'): raise ValueError('Sentinels must not start with _') if name not in self._sentinels: self._sentinels[name] = self._uuid_module.generate_uuid() return self._sentinels[name] sys.modules[__name__] = UUIDSentinels() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/utils.py0000664000175100017510000002244615033036143016740 0ustar00mylesmyles# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
"""Utilities and helper functions.""" import contextlib import functools import inspect import pyclbr import shutil import sys import tempfile import eventlet from oslo_concurrency import lockutils from oslo_context import context as common_context from oslo_log import log as logging from oslo_utils import importutils from oslo_utils import strutils from oslo_utils import timeutils import masakari.conf from masakari import exception from masakari.i18n import _ from masakari import safe_utils CONF = masakari.conf.CONF LOG = logging.getLogger(__name__) def reraise(tp, value, tb=None): try: if value is None: value = tp() if value.__traceback__ is not tb: raise value.with_traceback(tb) raise value finally: value = None tb = None def utf8(value): """Try to turn a string into utf-8 if possible. The original code was copied from the utf8 function in http://github.com/facebook/tornado/blob/master/tornado/escape.py """ if value is None or isinstance(value, bytes): return value if not isinstance(value, str): value = str(value) return value.encode('utf-8') def check_isinstance(obj, cls): """Checks that obj is of type cls, and lets PyLint infer types.""" if isinstance(obj, cls): return obj raise Exception(_('Expected object of type: %s') % (str(cls))) def monkey_patch(): """If the CONF.monkey_patch set as True, this function patches a decorator for all functions in specified modules. You can set decorators for each modules using CONF.monkey_patch_modules. The format is "Module path:Decorator function". name - name of the function function - object of the function """ # If CONF.monkey_patch is not True, this function do nothing. 
if not CONF.monkey_patch: return def is_method(obj): # Unbound methods became regular functions on Python 3 return inspect.ismethod(obj) or inspect.isfunction(obj) # Get list of modules and decorators for module_and_decorator in CONF.monkey_patch_modules: module, decorator_name = module_and_decorator.split(':') # import decorator function decorator = importutils.import_class(decorator_name) __import__(module) # Retrieve module information using pyclbr module_data = pyclbr.readmodule_ex(module) for key, value in module_data.items(): # set the decorator for the class methods if isinstance(value, pyclbr.Class): clz = importutils.import_class("%s.%s" % (module, key)) for method, func in inspect.getmembers(clz, is_method): setattr(clz, method, decorator("%s.%s.%s" % (module, key, method), func)) # set the decorator for the function if isinstance(value, pyclbr.Function): func = importutils.import_class("%s.%s" % (module, key)) setattr(sys.modules[module], key, decorator("%s.%s" % (module, key), func)) def walk_class_hierarchy(clazz, encountered=None): """Walk class hierarchy, yielding most derived classes first.""" if not encountered: encountered = [] for subclass in clazz.__subclasses__(): if subclass not in encountered: encountered.append(subclass) # drill down to leaves first for subsubclass in walk_class_hierarchy(subclass, encountered): yield subsubclass yield subclass def expects_func_args(*args): def _decorator_checker(dec): @functools.wraps(dec) def _decorator(f): base_f = safe_utils.get_wrapped_function(f) arg_names, a, kw, _, _, _, _ = inspect.getfullargspec(base_f) if a or kw or set(args) <= set(arg_names): # NOTE : We can't really tell if correct stuff will # be passed if it's a function with *args or **kwargs so # we still carry on and hope for the best return dec(f) else: raise TypeError("Decorated function %(f_name)s does not " "have the arguments expected by the " "decorator %(d_name)s" % {'f_name': base_f.__name__, 'd_name': dec.__name__}) return 
_decorator return _decorator_checker def isotime(at=None): """Current time as ISO string, as timeutils.isotime() is deprecated :returns: Current time in ISO format """ if not at: at = timeutils.utcnow() date_string = at.strftime("%Y-%m-%dT%H:%M:%S") tz = at.tzinfo.tzname(None) if at.tzinfo else 'UTC' date_string += ('Z' if tz in ['UTC', 'UTC+00:00'] else tz) return date_string def strtime(at): return at.strftime("%Y-%m-%dT%H:%M:%S.%f") class ExceptionHelper(object): """Class to wrap another and translate the ClientExceptions raised by its function calls to the actual ones. """ def __init__(self, target): self._target = target def __getattr__(self, name): func = getattr(self._target, name) @functools.wraps(func) def wrapper(*args, **kwargs): try: return func(*args, **kwargs) except Exception as e: reraise(*e.exc_info) return wrapper def spawn(func, *args, **kwargs): """Passthrough method for eventlet.spawn. This utility exists so that it can be stubbed for testing without interfering with the service spawns. It will also grab the context from the threadlocal store and add it to the store on the new thread. This allows for continuity in logging the context when using this method to spawn a new thread. """ _context = common_context.get_current() @functools.wraps(func) def context_wrapper(*args, **kwargs): # NOTE: If update_store is not called after spawn it won't be # available for the logger to pull from threadlocal storage. if _context is not None: _context.update_store() return func(*args, **kwargs) return eventlet.spawn(context_wrapper, *args, **kwargs) def spawn_n(func, *args, **kwargs): """Passthrough method for eventlet.spawn_n. This utility exists so that it can be stubbed for testing without interfering with the service spawns. It will also grab the context from the threadlocal store and add it to the store on the new thread. This allows for continuity in logging the context when using this method to spawn a new thread. 
""" _context = common_context.get_current() @functools.wraps(func) def context_wrapper(*args, **kwargs): # NOTE: If update_store is not called after spawn_n it won't be # available for the logger to pull from threadlocal storage. if _context is not None: _context.update_store() func(*args, **kwargs) eventlet.spawn_n(context_wrapper, *args, **kwargs) @contextlib.contextmanager def tempdir(**kwargs): argdict = kwargs.copy() if 'dir' not in argdict: argdict['dir'] = CONF.tempdir tmpdir = tempfile.mkdtemp(**argdict) try: yield tmpdir finally: try: shutil.rmtree(tmpdir) except OSError as e: LOG.error('Could not remove tmpdir: %s', e) def validate_integer(value, name, min_value=None, max_value=None): """Make sure that value is a valid integer, potentially within range.""" try: return strutils.validate_integer(value, name, min_value, max_value) except ValueError as e: raise exception.InvalidInput(reason=e) def synchronized(name, semaphores=None, blocking=False): def wrap(f): @functools.wraps(f) def inner(*args, **kwargs): lock_name = 'masakari-%s' % name int_lock = lockutils.internal_lock(lock_name, semaphores=semaphores) LOG.debug("Acquiring lock: %(lock_name)s on resource: " "%(resource)s", {'lock_name': lock_name, 'resource': f.__name__}) if not int_lock.acquire(blocking=blocking): raise exception.LockAlreadyAcquired(resource=name) try: return f(*args, **kwargs) finally: LOG.debug("Releasing lock: %(lock_name)s on resource: " "%(resource)s", {'lock_name': lock_name, 'resource': f.__name__}) int_lock.release() return inner return wrap ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/version.py0000664000175100017510000000451415033036143017261 0ustar00mylesmyles# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from pbr import version as pbr_version MASAKARI_VENDOR = "OpenStack Foundation" MASAKARI_PRODUCT = "OpenStack Masakari" MASAKARI_PACKAGE = None # OS distro package version suffix loaded = False version_info = pbr_version.VersionInfo('masakari') version_string = version_info.version_string def _load_config(): # Don't load in global context, since we can't assume # these modules are accessible when distutils uses # this module import configparser from oslo_config import cfg from oslo_log import log as logging global loaded, MASAKARI_VENDOR, MASAKARI_PRODUCT, MASAKARI_PACKAGE if loaded: return loaded = True cfgfile = cfg.CONF.find_file("release") if cfgfile is None: return try: cfg = configparser.RawConfigParser() cfg.read(cfgfile) if cfg.has_option("Masakari", "vendor"): MASAKARI_VENDOR = cfg.get("Masakari", "vendor") if cfg.has_option("Masakari", "product"): MASAKARI_PRODUCT = cfg.get("Masakari", "product") if cfg.has_option("Masakari", "package"): MASAKARI_PACKAGE = cfg.get("Masakari", "package") except Exception as ex: LOG = logging.getLogger(__name__) LOG.error("Failed to load %(cfgfile)s: %(ex)s", {'cfgfile': cfgfile, 'ex': ex}) def vendor_string(): _load_config() return MASAKARI_VENDOR def product_string(): _load_config() return MASAKARI_PRODUCT def package_string(): _load_config() return MASAKARI_PACKAGE def version_string_with_package(): if package_string() is None: return version_info.version_string() else: return "%s-%s" % (version_info.version_string(), package_string()) ././@PaxHeader0000000000000000000000000000003200000000000010210 xustar0026 
mtime=1751923813.53271 masakari-19.1.0.dev18/masakari/wsgi/0000775000175100017510000000000015033036146016172 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/wsgi/__init__.py0000664000175100017510000000000015033036143020266 0ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari/wsgi/api.py0000664000175100017510000000141515033036143017313 0ustar00mylesmyles# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
"""WSGI application entry-point for Masakari API.""" import threading from masakari.cmd import api application = None lock = threading.Lock() with lock: if application is None: application = api.initialize_application() ././@PaxHeader0000000000000000000000000000003200000000000010210 xustar0026 mtime=1751923813.52571 masakari-19.1.0.dev18/masakari.egg-info/0000775000175100017510000000000015033036146016713 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari.egg-info/PKG-INFO0000644000175100017510000001135615033036143020011 0ustar00mylesmylesMetadata-Version: 2.2 Name: masakari Version: 19.1.0.dev18 Summary: Virtual Machine High Availability (VMHA) service for OpenStack Home-page: https://docs.openstack.org/masakari/latest/ Author: OpenStack Author-email: openstack-discuss@lists.openstack.org Classifier: Development Status :: 5 - Production/Stable Classifier: Environment :: OpenStack Classifier: Intended Audience :: Information Technology Classifier: Intended Audience :: System Administrators Classifier: License :: OSI Approved :: Apache Software License Classifier: Operating System :: POSIX :: Linux Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.10 Classifier: Programming Language :: Python :: 3.11 Classifier: Programming Language :: Python :: 3.12 Classifier: Programming Language :: Python :: 3 :: Only Classifier: Programming Language :: Python :: Implementation :: CPython Requires-Python: >=3.10 License-File: LICENSE Requires-Dist: alembic>=1.8.0 Requires-Dist: iso8601>=0.1.11 Requires-Dist: jsonschema>=3.2.0 Requires-Dist: keystoneauth1>=3.4.0 Requires-Dist: keystonemiddleware>=4.17.0 Requires-Dist: WebOb>=1.7.1 Requires-Dist: microversion-parse>=0.2.1 Requires-Dist: oslo.config>=5.2.0 Requires-Dist: oslo.context>=2.19.2 Requires-Dist: oslo.db>=4.44.0 Requires-Dist: 
oslo.messaging>=14.1.0 Requires-Dist: oslo.i18n>=3.15.3 Requires-Dist: oslo.log>=3.36.0 Requires-Dist: oslo.middleware>=3.31.0 Requires-Dist: oslo.policy>=4.5.0 Requires-Dist: oslo.service!=1.28.1,>=1.24.0 Requires-Dist: oslo.upgradecheck>=1.3.0 Requires-Dist: oslo.utils>=4.7.0 Requires-Dist: oslo.versionedobjects>=1.31.2 Requires-Dist: pbr!=2.1.0,>=2.0.0 Requires-Dist: python-novaclient>=9.1.0 Requires-Dist: stevedore>=1.20.0 Requires-Dist: SQLAlchemy>=1.2.19 Requires-Dist: SQLAlchemy-Utils>=0.33.10 Requires-Dist: taskflow>=2.16.0 Requires-Dist: tooz>=2.10.1 Dynamic: author Dynamic: author-email Dynamic: classifier Dynamic: description Dynamic: requires-dist Dynamic: requires-python Dynamic: summary ======== Masakari ======== Virtual Machine High Availability (VMHA) service for OpenStack Masakari provides Virtual Machine High Availability (VMHA) service for OpenStack clouds by automatically recovering the KVM-based Virtual Machine(VM)s from failure events such as VM process down, provisioning process down, and nova-compute host failure. It also provides API service for manage and control the automated rescue mechanism. NOTE: Use masakari only if instance path is configured on shared storage system i.e, 'instances_path' config option of nova has a path of shared directory otherwise instance data will be lost after the evacuation of instance from failed host if, * instance is booted from image * flavor using ephemeral disks is used Original version of Masakari: https://github.com/ntt-sic/masakari Tokyo Summit Session: https://www.youtube.com/watch?v=BmjNKceW_9A Masakari is distributed under the terms of the Apache License, Version 2.0. The full terms and conditions of this license are detailed in the LICENSE file. 
* Free software: Apache license 2.0 * Documentation: https://docs.openstack.org/masakari/latest * Release notes: https://docs.openstack.org/releasenotes/masakari/ * Source: https://opendev.org/openstack/masakari * Bugs: https://bugs.launchpad.net/masakari Configure masakari-api ---------------------- #. Create masakari user: .. code-block:: shell-session openstack user create --password-prompt masakari (give password as masakari) #. Add admin role to masakari user: .. code-block:: shell-session openstack role add --project service --user masakari admin #. Create new service: .. code-block:: shell-session openstack service create --name masakari --description "masakari high availability" instance-ha #. Create endpoint for masakari service: .. code-block:: shell-session openstack endpoint create --region RegionOne masakari --publicurl http://:/v1/%\(tenant_id\)s #. Clone masakari using .. code-block:: shell-session git clone https://github.com/openstack/masakari.git #. Run setup.py from masakari .. code-block:: shell-session sudo python setup.py install #. Create directory ``/etc/masakari`` #. Copy ``masakari.conf``, ``api-paste.ini`` and ``policy.yaml`` file from ``masakari/etc/`` to ``/etc/masakari`` folder #. To run masakari-api simply use following binary: .. code-block:: shell-session masakari-api Configure masakari database --------------------------- #. Create 'masakari' database #. After running setup.py for masakari (``sudo python setup.py install``), run ``masakari-manage`` command to sync the database .. 
code-block:: shell-session masakari-manage db sync Features -------- * TODO ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari.egg-info/SOURCES.txt0000664000175100017510000003712515033036143020604 0ustar00mylesmyles.coveragerc .mailmap .pre-commit-config.yaml .stestr.conf .yamllint .zuul.yaml AUTHORS CONTRIBUTING.rst ChangeLog HACKING.rst LICENSE MANIFEST.in README.rst bindep.txt pyproject.toml requirements.txt setup.cfg setup.py test-requirements.txt tox.ini api-ref/source/conf.py api-ref/source/failover-segments.inc api-ref/source/hosts.inc api-ref/source/index.rst api-ref/source/notifications.inc api-ref/source/parameters.yaml api-ref/source/status.yaml api-ref/source/versions.inc api-ref/source/vmoves.inc devstack/README.rst devstack/plugin.sh devstack/settings doc/requirements.txt doc/api_samples/hosts/host-create-req.json doc/api_samples/hosts/host-create-resp.json doc/api_samples/hosts/host-get-resp.json doc/api_samples/hosts/host-update-req.json doc/api_samples/hosts/host-update-resp.json doc/api_samples/hosts/hosts-list-resp.json doc/api_samples/notifications/host-notification-create-req.json doc/api_samples/notifications/host-notification-create-resp.json doc/api_samples/notifications/notifcations-list-resp.json doc/api_samples/notifications/notification-get-resp.json doc/api_samples/notifications/process-notification-create-req.json doc/api_samples/notifications/process-notification-create-resp.json doc/api_samples/notifications/vm-notification-create-req.json doc/api_samples/notifications/vm-notification-create-resp.json doc/api_samples/segments/segment-create-req.json doc/api_samples/segments/segment-create-resp.json doc/api_samples/segments/segment-get-resp.json doc/api_samples/segments/segment-update-req.json doc/api_samples/segments/segment-update-resp.json doc/api_samples/segments/segments-list-resp.json doc/api_samples/versions/v1-version-get-resp.json 
doc/api_samples/versions/versions-get-resp.json doc/api_samples/vmoves/vmove-get-resp.json doc/api_samples/vmoves/vmoves-list-resp.json doc/ext/__init__.py doc/ext/versioned_notifications.py doc/notification_samples/create-host-end.json doc/notification_samples/create-host-start.json doc/notification_samples/create-notification-end.json doc/notification_samples/create-notification-start.json doc/notification_samples/create-segment-end.json doc/notification_samples/create-segment-start.json doc/notification_samples/delete-host-end.json doc/notification_samples/delete-host-start.json doc/notification_samples/delete-segment-end.json doc/notification_samples/delete-segment-start.json doc/notification_samples/error-exception.json doc/notification_samples/process-notification-end.json doc/notification_samples/process-notification-error.json doc/notification_samples/process-notification-start.json doc/notification_samples/update-host-end.json doc/notification_samples/update-host-start.json doc/notification_samples/update-segment-end.json doc/notification_samples/update-segment-start.json doc/source/conf.py doc/source/index.rst doc/source/_static/Masakari_spec_process.svg doc/source/_static/architecture.png doc/source/cli/index.rst doc/source/cli/masakari-manage.rst doc/source/cli/masakari-status.rst doc/source/cli/openstack-masakari.rst doc/source/configuration/api-paste.ini.rst doc/source/configuration/config.rst doc/source/configuration/index.rst doc/source/configuration/policy.rst doc/source/configuration/recovery_config.rst doc/source/configuration/recovery_workflow_custom_task.rst doc/source/configuration/recovery_workflow_sample_config.rst doc/source/configuration/sample_config.rst doc/source/configuration/sample_policy.rst doc/source/contributor/code_structure.rst doc/source/contributor/contributing.rst doc/source/contributor/release_notes.rst doc/source/install/development.environment.rst doc/source/install/index.rst doc/source/install/install_and_configure.rst 
doc/source/install/install_and_configure_ubuntu.rst doc/source/install/overview.rst doc/source/install/verify.rst doc/source/user/architecture.rst doc/source/user/how_to_get_involved.rst doc/source/user/notifications.rst doc/source/user/process.rst etc/masakari/README-masakari.conf.txt etc/masakari/api-paste.ini etc/masakari/masakari-config-generator.conf etc/masakari/masakari-custom-recovery-methods.conf etc/masakari/masakari-customized-recovery-flow-config-generator.conf etc/masakari/masakari-policy-generator.conf masakari/__init__.py masakari/config.py masakari/context.py masakari/coordination.py masakari/exception.py masakari/i18n.py masakari/manager.py masakari/policy.py masakari/rpc.py masakari/safe_utils.py masakari/service.py masakari/utils.py masakari/version.py masakari.egg-info/PKG-INFO masakari.egg-info/SOURCES.txt masakari.egg-info/dependency_links.txt masakari.egg-info/entry_points.txt masakari.egg-info/not-zip-safe masakari.egg-info/pbr.json masakari.egg-info/requires.txt masakari.egg-info/top_level.txt masakari/api/__init__.py masakari/api/api_version_request.py masakari/api/auth.py masakari/api/urlmap.py masakari/api/utils.py masakari/api/versioned_method.py masakari/api/wsgi.py masakari/api/openstack/__init__.py masakari/api/openstack/common.py masakari/api/openstack/extensions.py masakari/api/openstack/wsgi.py masakari/api/openstack/ha/__init__.py masakari/api/openstack/ha/extension_info.py masakari/api/openstack/ha/hosts.py masakari/api/openstack/ha/notifications.py masakari/api/openstack/ha/segments.py masakari/api/openstack/ha/versions.py masakari/api/openstack/ha/versionsV1.py masakari/api/openstack/ha/vmoves.py masakari/api/openstack/ha/schemas/__init__.py masakari/api/openstack/ha/schemas/hosts.py masakari/api/openstack/ha/schemas/notifications.py masakari/api/openstack/ha/schemas/payload.py masakari/api/openstack/ha/schemas/segments.py masakari/api/openstack/ha/views/__init__.py masakari/api/openstack/ha/views/hosts.py 
masakari/api/openstack/ha/views/versions.py masakari/api/validation/__init__.py masakari/api/validation/parameter_types.py masakari/api/validation/validators.py masakari/cmd/__init__.py masakari/cmd/api.py masakari/cmd/engine.py masakari/cmd/manage.py masakari/cmd/status.py masakari/common/__init__.py masakari/common/config.py masakari/compute/__init__.py masakari/compute/nova.py masakari/conf/__init__.py masakari/conf/api.py masakari/conf/base.py masakari/conf/coordination.py masakari/conf/database.py masakari/conf/engine.py masakari/conf/engine_driver.py masakari/conf/exceptions.py masakari/conf/nova.py masakari/conf/opts.py masakari/conf/osapi_v1.py masakari/conf/paths.py masakari/conf/service.py masakari/conf/ssl.py masakari/conf/wsgi.py masakari/db/__init__.py masakari/db/api.py masakari/db/migration.py masakari/db/sqlalchemy/__init__.py masakari/db/sqlalchemy/alembic.ini masakari/db/sqlalchemy/api.py masakari/db/sqlalchemy/migration.py masakari/db/sqlalchemy/models.py masakari/db/sqlalchemy/migrations/README.rst masakari/db/sqlalchemy/migrations/env.py masakari/db/sqlalchemy/migrations/script.py.mako masakari/db/sqlalchemy/migrations/versions/13adff5efb9a_extend_notification_table.py masakari/db/sqlalchemy/migrations/versions/8bdf5929c5a6_add_vm_moves_table.py masakari/db/sqlalchemy/migrations/versions/8f848eb45d03_initial_revision.py masakari/engine/__init__.py masakari/engine/driver.py masakari/engine/instance_events.py masakari/engine/manager.py masakari/engine/rpcapi.py masakari/engine/utils.py masakari/engine/drivers/__init__.py masakari/engine/drivers/taskflow/__init__.py masakari/engine/drivers/taskflow/base.py masakari/engine/drivers/taskflow/driver.py masakari/engine/drivers/taskflow/host_failure.py masakari/engine/drivers/taskflow/instance_failure.py masakari/engine/drivers/taskflow/no_op.py masakari/engine/drivers/taskflow/process_failure.py masakari/ha/__init__.py masakari/ha/api.py masakari/hacking/__init__.py masakari/hacking/checks.py 
masakari/notifications/__init__.py masakari/notifications/objects/__init__.py masakari/notifications/objects/base.py masakari/notifications/objects/exception.py masakari/notifications/objects/notification.py masakari/objects/__init__.py masakari/objects/base.py masakari/objects/fields.py masakari/objects/host.py masakari/objects/notification.py masakari/objects/segment.py masakari/objects/vmove.py masakari/policies/__init__.py masakari/policies/base.py masakari/policies/extension_info.py masakari/policies/hosts.py masakari/policies/notifications.py masakari/policies/segments.py masakari/policies/versions.py masakari/policies/vmoves.py masakari/tests/__init__.py masakari/tests/fixtures.py masakari/tests/json_ref.py masakari/tests/uuidsentinel.py masakari/tests/functional/__init__.py masakari/tests/functional/base.py masakari/tests/functional/notification_base.py masakari/tests/functional/test_hosts.py masakari/tests/functional/test_process_notifications.py masakari/tests/functional/test_segments.py masakari/tests/functional/test_vm_notifications.py masakari/tests/unit/__init__.py masakari/tests/unit/base.py masakari/tests/unit/conf_fixture.py masakari/tests/unit/fake_notifier.py masakari/tests/unit/fake_policy.py masakari/tests/unit/fakes.py masakari/tests/unit/policy_fixture.py masakari/tests/unit/test_api_validation.py masakari/tests/unit/test_conf.py masakari/tests/unit/test_context.py masakari/tests/unit/test_exception.py masakari/tests/unit/test_hacking.py masakari/tests/unit/test_masakari_manage.py masakari/tests/unit/test_policy.py masakari/tests/unit/test_rpc.py masakari/tests/unit/test_safeutils.py masakari/tests/unit/test_service.py masakari/tests/unit/test_utils.py masakari/tests/unit/test_versions.py masakari/tests/unit/utils.py masakari/tests/unit/api/__init__.py masakari/tests/unit/api/test_api_version_request.py masakari/tests/unit/api/test_auth.py masakari/tests/unit/api/test_utils.py masakari/tests/unit/api/test_wsgi.py 
masakari/tests/unit/api/openstack/__init__.py masakari/tests/unit/api/openstack/fakes.py masakari/tests/unit/api/openstack/test_common.py masakari/tests/unit/api/openstack/test_extensions.py masakari/tests/unit/api/openstack/test_wsgi.py masakari/tests/unit/api/openstack/ha/__init__.py masakari/tests/unit/api/openstack/ha/test_extension_info.py masakari/tests/unit/api/openstack/ha/test_hosts.py masakari/tests/unit/api/openstack/ha/test_notifications.py masakari/tests/unit/api/openstack/ha/test_segments.py masakari/tests/unit/api/openstack/ha/test_versions.py masakari/tests/unit/api/openstack/ha/test_vmoves.py masakari/tests/unit/cmd/__init__.py masakari/tests/unit/cmd/test_masakari_api.py masakari/tests/unit/cmd/test_status.py masakari/tests/unit/compute/__init__.py masakari/tests/unit/compute/test_nova.py masakari/tests/unit/db/__init__.py masakari/tests/unit/db/test_db_api.py masakari/tests/unit/db/test_migrations.py masakari/tests/unit/db/test_purge.py masakari/tests/unit/engine/__init__.py masakari/tests/unit/engine/fake_engine.py masakari/tests/unit/engine/test_engine_mgr.py masakari/tests/unit/engine/test_rpcapi.py masakari/tests/unit/engine/test_utils.py masakari/tests/unit/engine/drivers/__init__.py masakari/tests/unit/engine/drivers/taskflow/__init__.py masakari/tests/unit/engine/drivers/taskflow/test_host_failure_flow.py masakari/tests/unit/engine/drivers/taskflow/test_instance_failure_flow.py masakari/tests/unit/engine/drivers/taskflow/test_process_failure_flow.py masakari/tests/unit/engine/drivers/taskflow/test_taskflow_driver.py masakari/tests/unit/ha/__init__.py masakari/tests/unit/ha/test_api.py masakari/tests/unit/monkey_patch_example/__init__.py masakari/tests/unit/monkey_patch_example/example_a.py masakari/tests/unit/monkey_patch_example/example_b.py masakari/tests/unit/notifications/__init__.py masakari/tests/unit/notifications/objects/__init__.py masakari/tests/unit/notifications/objects/test_notification.py 
masakari/tests/unit/objects/__init__.py masakari/tests/unit/objects/fake_args.py masakari/tests/unit/objects/test_fields.py masakari/tests/unit/objects/test_hosts.py masakari/tests/unit/objects/test_notifications.py masakari/tests/unit/objects/test_objects.py masakari/tests/unit/objects/test_segments.py masakari/tests/unit/objects/test_vmoves.py masakari/wsgi/__init__.py masakari/wsgi/api.py playbooks/devstack/post.yaml playbooks/devstack/pre.yaml playbooks/devstack/run-devstack.yaml playbooks/devstack/run-tox.yaml playbooks/devstack/run-verify-config-is-ipv6-only.yaml releasenotes/notes/.placeholder releasenotes/notes/add-masakari-wsgi-module-a5f5a649a2ec460c.yaml releasenotes/notes/add-periodic-tasks-0c96d6f620502a75.yaml releasenotes/notes/add-upgrade-check-framework-52268130b25317ab.yaml releasenotes/notes/add_evacuate_error_instances_conf_option-5b4d1906137395f0.yaml releasenotes/notes/add_ha_enabled_config_options-54a9270a5993d20a.yaml releasenotes/notes/add_reserved_host_to_aggregates-5f506d08354ec148.yaml releasenotes/notes/adopt-oslo-config-generator-cf2fdb17cf7f13db.yaml releasenotes/notes/auto_priority_and_rh_priority_recovery_methods-b88cc00041fa2c4d.yaml releasenotes/notes/blueprint-add-vmoves-348fd430aa936721.yaml releasenotes/notes/blueprint-support-nova-system-scope-policies-c4dbd244dd3fcf1a.yaml releasenotes/notes/bp-mutable-config-57efdd467c01aa7b.yaml releasenotes/notes/bug-1685145-3d93145bfc76c660.yaml releasenotes/notes/bug-1776385-0bcf0a0b3fad359e.yaml releasenotes/notes/bug-1782517-e4dc70bad9e4e131.yaml releasenotes/notes/bug-1856164-6601a6e6280eba4d.yaml releasenotes/notes/bug-1859406-6b041a26acf6c7f6.yaml releasenotes/notes/bug-1882516-e8dc7fd2b55f065f.yaml releasenotes/notes/bug-1932194-2b721860bbc26819.yaml releasenotes/notes/bug-1960619-4c2cc73483bdff86.yaml releasenotes/notes/bug-1980736-975ee013e4612062.yaml releasenotes/notes/bug-add-missing-domain-name-5181c02f3f033a22.yaml 
releasenotes/notes/compute-disable-reason-9570734c0bb888cf.yaml releasenotes/notes/compute_search-3da97e69e661a73f.yaml releasenotes/notes/coordination_for_host_notification-a156ec5a5839a781.yaml releasenotes/notes/correct_response_code-df8b43a201efa1b4.yaml releasenotes/notes/customisable-ha-enabled-instance-metadata-key-af511ea2aac96690.yaml releasenotes/notes/db-purge-support-7a33e2ea5d2a624b.yaml releasenotes/notes/deprecate-json-formatted-policy-file-57ad537ec19cc7e0.yaml releasenotes/notes/deprecate-topic-opt-af83f82143143c61.yaml releasenotes/notes/drop-py-2-7-059d3cd5e7cb4e1a.yaml releasenotes/notes/drop-python-38-39-deab0b81006bae48.yaml releasenotes/notes/enabled-to-segment-7e6184feb1e4f818.yaml releasenotes/notes/evacuation_in_threads-cc9c79b10acfb5f6.yaml releasenotes/notes/failover_segment_apis-f5bea1cd6d103048.yaml releasenotes/notes/fix-endless-periodic-f223845f3044b166.yaml releasenotes/notes/fix-notification-stuck-problem-fdb84bad8641384b.yaml releasenotes/notes/host-apis-46a87fcd56d8ed30.yaml releasenotes/notes/notifications-in-masakari-f5d79838fc23cb9b.yaml releasenotes/notes/notifications_apis-3c3d5055ae9c6649.yaml releasenotes/notes/policy-in-code-8740d51624055044.yaml releasenotes/notes/progress-details-recovery-workflows-5b14b7b3f87374f4.yaml releasenotes/notes/recovery-method-customization-3438b0e26e322b88.yaml releasenotes/notes/remove-masakari-wsgi-script-bb737746a17111ab.yaml releasenotes/notes/reserved_host_recovery_method-d2de1f205136c8d5.yaml releasenotes/notes/switch-to-alembic-b438de67c5b22a40.yaml releasenotes/notes/wsgi-applications-3ed7d6b89f1a5785.yaml releasenotes/source/2023.1.rst releasenotes/source/2023.2.rst releasenotes/source/2024.1.rst releasenotes/source/2024.2.rst releasenotes/source/2025.1.rst releasenotes/source/conf.py releasenotes/source/index.rst releasenotes/source/ocata.rst releasenotes/source/pike.rst releasenotes/source/queens.rst releasenotes/source/rocky.rst releasenotes/source/stein.rst 
releasenotes/source/train.rst releasenotes/source/unreleased.rst releasenotes/source/ussuri.rst releasenotes/source/victoria.rst releasenotes/source/wallaby.rst releasenotes/source/xena.rst releasenotes/source/yoga.rst releasenotes/source/zed.rst releasenotes/source/_static/.placeholder releasenotes/source/_templates/.placeholder releasenotes/templates/feature.yml releasenotes/templates/fix.yml roles/devstack-config/tasks/main.yml tools/test-setup.sh././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari.egg-info/dependency_links.txt0000664000175100017510000000000115033036143022756 0ustar00mylesmyles ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari.egg-info/entry_points.txt0000664000175100017510000000354715033036143022217 0ustar00mylesmyles[console_scripts] masakari-api = masakari.cmd.api:main masakari-engine = masakari.cmd.engine:main masakari-manage = masakari.cmd.manage:main masakari-status = masakari.cmd.status:main [masakari.api.v1.extensions] extension_info = masakari.api.openstack.ha.extension_info:ExtensionInfo hosts = masakari.api.openstack.ha.hosts:Hosts notifications = masakari.api.openstack.ha.notifications:Notifications segments = masakari.api.openstack.ha.segments:Segments versions = masakari.api.openstack.ha.versionsV1:Versions vmoves = masakari.api.openstack.ha.vmoves:VMoves [masakari.driver] taskflow_driver = masakari.engine.drivers.taskflow:TaskFlowDriver [masakari.task_flow.tasks] confirm_compute_node_disabled_task = masakari.engine.drivers.taskflow.process_failure:ConfirmComputeNodeDisabledTask confirm_instance_active_task = masakari.engine.drivers.taskflow.instance_failure:ConfirmInstanceActiveTask disable_compute_node_task = masakari.engine.drivers.taskflow.process_failure:DisableComputeNodeTask disable_compute_service_task = masakari.engine.drivers.taskflow.host_failure:DisableComputeServiceTask 
evacuate_instances_task = masakari.engine.drivers.taskflow.host_failure:EvacuateInstancesTask no_op = masakari.engine.drivers.taskflow.no_op:Noop prepare_HA_enabled_instances_task = masakari.engine.drivers.taskflow.host_failure:PrepareHAEnabledInstancesTask start_instance_task = masakari.engine.drivers.taskflow.instance_failure:StartInstanceTask stop_instance_task = masakari.engine.drivers.taskflow.instance_failure:StopInstanceTask [oslo.config.opts] customized_recovery_flow_opts = masakari.conf.opts:list_recovery_workflow_opts masakari.conf = masakari.conf.opts:list_opts [oslo.config.opts.defaults] masakari.api = masakari.common.config:set_lib_defaults [oslo.policy.enforcer] masakari = masakari.policy:get_enforcer [oslo.policy.policies] masakari = masakari.policies:list_rules ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari.egg-info/not-zip-safe0000664000175100017510000000000115033036143021136 0ustar00mylesmyles ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari.egg-info/pbr.json0000664000175100017510000000005715033036143020370 0ustar00mylesmyles{"git_version": "d75d216", "is_release": false}././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari.egg-info/requires.txt0000664000175100017510000000102515033036143021306 0ustar00mylesmylesalembic>=1.8.0 iso8601>=0.1.11 jsonschema>=3.2.0 keystoneauth1>=3.4.0 keystonemiddleware>=4.17.0 WebOb>=1.7.1 microversion-parse>=0.2.1 oslo.config>=5.2.0 oslo.context>=2.19.2 oslo.db>=4.44.0 oslo.messaging>=14.1.0 oslo.i18n>=3.15.3 oslo.log>=3.36.0 oslo.middleware>=3.31.0 oslo.policy>=4.5.0 oslo.service!=1.28.1,>=1.24.0 oslo.upgradecheck>=1.3.0 oslo.utils>=4.7.0 oslo.versionedobjects>=1.31.2 pbr!=2.1.0,>=2.0.0 python-novaclient>=9.1.0 stevedore>=1.20.0 SQLAlchemy>=1.2.19 SQLAlchemy-Utils>=0.33.10 taskflow>=2.16.0 
tooz>=2.10.1 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/masakari.egg-info/top_level.txt0000664000175100017510000000001115033036143021432 0ustar00mylesmylesmasakari ././@PaxHeader0000000000000000000000000000003200000000000010210 xustar0026 mtime=1751923813.52071 masakari-19.1.0.dev18/playbooks/0000775000175100017510000000000015033036146015434 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000003200000000000010210 xustar0026 mtime=1751923813.53271 masakari-19.1.0.dev18/playbooks/devstack/0000775000175100017510000000000015033036146017240 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/playbooks/devstack/post.yaml0000664000175100017510000000011315033036143021101 0ustar00mylesmyles--- - hosts: all roles: - fetch-subunit-output - devstack-config ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/playbooks/devstack/pre.yaml0000664000175100017510000000006215033036143020705 0ustar00mylesmyles--- - hosts: controller roles: - ensure-tox ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/playbooks/devstack/run-devstack.yaml0000664000175100017510000000006515033036143022530 0ustar00mylesmyles--- - hosts: all roles: - orchestrate-devstack ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/playbooks/devstack/run-tox.yaml0000664000175100017510000000045215033036143021536 0ustar00mylesmyles--- - hosts: controller roles: - role: tox vars: # NOTE(yoctozepto): We need to run this play in other projects but it is # meant to run tox from this project, not the "current one". 
zuul_work_dir: "{{ zuul.projects['opendev.org/openstack/masakari'].src_dir }}" ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/playbooks/devstack/run-verify-config-is-ipv6-only.yaml0000664000175100017510000000011415033036143025740 0ustar00mylesmyles--- - hosts: all roles: - devstack-ipv6-only-deployments-verification ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/pyproject.toml0000664000175100017510000000010515033036143016336 0ustar00mylesmyles[build-system] requires = ["pbr>=6.1.1"] build-backend = "pbr.build" ././@PaxHeader0000000000000000000000000000003200000000000010210 xustar0026 mtime=1751923813.52071 masakari-19.1.0.dev18/releasenotes/0000775000175100017510000000000015033036146016122 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000003200000000000010210 xustar0026 mtime=1751923813.53471 masakari-19.1.0.dev18/releasenotes/notes/0000775000175100017510000000000015033036146017252 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/notes/.placeholder0000664000175100017510000000000015033036143021520 0ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/notes/add-masakari-wsgi-module-a5f5a649a2ec460c.yaml0000664000175100017510000000107215033036143027175 0ustar00mylesmyles--- features: - | A new module, ``masakari.wsgi``, has been added as a place to gather WSGI ``application`` objects. This is intended to ease deployment by providing a consistent location for these objects. For example, if using uWSGI then instead of: .. code-block:: ini [uwsgi] wsgi-file = /bin/masakari-wsgi You can now use: .. 
code-block:: ini [uwsgi] module = masakari.wsgi.api:application This also simplifies deployment with other WSGI servers that expect module paths such as gunicorn. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/notes/add-periodic-tasks-0c96d6f620502a75.yaml0000664000175100017510000000174615033036143025665 0ustar00mylesmyles--- features: - | Added _process_unfinished_notifications to process notifications which are in error or new state. This periodic task will execute at regular interval defined by new config option 'process_unfinished_notifications_interval' defaults to 120 seconds. The notifications which are in ‘new’ status will be picked up based on a new config option ‘retry_notification_new_status_interval’ defaults to 60 seconds. To change the default execution time of periodic task, following config option needs to be set with desirable time under 'DEFAULT' section in 'masakari.conf' file:: [DEFAULT] process_unfinished_notifications_interval = 120 To change the default identification time of notifications which are stuck in 'NEW' state, following config option needs to be set with desirable time under 'DEFAULT' section in 'masakari.conf' file:: [DEFAULT] retry_notification_new_status_interval = 60 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/notes/add-upgrade-check-framework-52268130b25317ab.yaml0000664000175100017510000000073515033036143027342 0ustar00mylesmyles--- prelude: > Added new tool ``masakari-status upgrade check``. features: - | New framework for ``masakari-status upgrade check`` command is added. This framework allows adding various checks which can be run before a Masakari upgrade to ensure if the upgrade can be performed safely. 
upgrade: - | Operator can now use new CLI tool ``masakari-status upgrade check`` to check if Masakari deployment can be safely upgraded from N-1 to N release. ././@PaxHeader0000000000000000000000000000020700000000000010214 xustar00113 path=masakari-19.1.0.dev18/releasenotes/notes/add_evacuate_error_instances_conf_option-5b4d1906137395f0.yaml 22 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/notes/add_evacuate_error_instances_conf_option-5b4d1906137395f0.y0000664000175100017510000000125615033036143031725 0ustar00mylesmyles--- features: - | Operators can decide whether error instances should be allowed for evacuation along with other instances from a failed source compute node or not. Added a new config option ``ignore_instances_in_error_state`` to achieve this. When set to True, masakari will skip the recovery of error instances otherwise it will evacuate error instances as well from a failed source compute node. To use this feature, following config option need to be set under ``host_failure`` section in 'masakari.conf' file:: [host_failure] ignore_instances_in_error_state = False The default value for this config option is set to False. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/notes/add_ha_enabled_config_options-54a9270a5993d20a.yaml0000664000175100017510000000270515033036143030164 0ustar00mylesmyles--- features: - | Operators can decide whether all instances or only those instances which contain metadata key 'HA_Enabled=True' should be allowed for evacuation from a failed source compute node. When set to True, it will evacuate all instances from a failed source compute node. First preference will be given to those instances which contain 'HA_Enabled=True' metadata key, and then it will evacuate the remaining ones. When set to False, it will evacuate only those instances which contain 'HA_Enabled=True' metadata key. 
To use this feature, following config option need to be set under ``host_failure`` section in 'masakari.conf' file:: [host_failure] evacuate_all_instances = True - | Operators can decide whether all instances or only those instances which contain metadata key 'HA_Enabled=True' should be taken into account to recover from instance failure events. When set to True, it will execute instance failure recovery actions for an instance irrespective of whether that particular instance contains metadata key 'HA_Enabled=True' or not. When set to False, it will only execute instance failure recovery action for an instance which contain metadata key 'HA_Enabled=True'. To use this feature, following config option need to be set under ``instance_failure`` section in 'masakari.conf' file:: [instance_failure] process_all_instances = True ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/notes/add_reserved_host_to_aggregates-5f506d08354ec148.yaml0000664000175100017510000000064015033036143030572 0ustar00mylesmyles--- features: - | Operators can now decide based on the new config option 'add_reserved_host_to_aggregate' whether to add or not a reserved_host to all host aggregates which failed compute host belongs to. To use this feature, following config option need to be set under ``host_failure`` section in 'masakari.conf' file:: [host_failure] add_reserved_host_to_aggregate = True ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/notes/adopt-oslo-config-generator-cf2fdb17cf7f13db.yaml0000664000175100017510000000050715033036143030161 0ustar00mylesmyles--- other: - | Adopt oslo-config-generator to generate sample config files. New config options from masakari code should register with masakari/conf/opts.py. 
A deprecated option should add a deprecated group even if it didn't alter its group, otherwise the deprecated group will use 'DEFAULT' by default. ././@PaxHeader0000000000000000000000000000021500000000000010213 xustar00119 path=masakari-19.1.0.dev18/releasenotes/notes/auto_priority_and_rh_priority_recovery_methods-b88cc00041fa2c4d.yaml 22 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/notes/auto_priority_and_rh_priority_recovery_methods-b88cc00041fa0000664000175100017510000000127415033036143032625 0ustar00mylesmyles--- features: - | Implemented workflow for 'auto_priority' and 'rh_priority' recovery methods in case of host failure recovery. Operators can set failover_segment's recovery_method as 'auto_priority' and 'rh_priority' now. In case of 'auto_priority' the 'auto' workflow will be executed first to recover the instances from failed compute host. If 'auto' workflow fails to recover the instances then 'reserved_host' workflow will be tried. In case of 'rh_priority' the 'reserved_host' workflow will be executed first to recover the instances from failed compute host. If 'reserved_host' workflow fails to recover the instances then 'auto' workflow will be tried. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/notes/blueprint-add-vmoves-348fd430aa936721.yaml0000664000175100017510000000066015033036143026257 0ustar00mylesmyles--- features: - | It persists vm moves information for one host failure notification into the database, which could help users to insight the process or result of the host recovery workflow, such as which vms evacuated succeed or failed, and which ones are still evacuating. Added VMove API in microversion 1.3, please refer to .. 
_`VMoves`: https://docs.openstack.org/api-ref/instance-ha/#vmoves-vmoves ././@PaxHeader0000000000000000000000000000021300000000000010211 xustar00117 path=masakari-19.1.0.dev18/releasenotes/notes/blueprint-support-nova-system-scope-policies-c4dbd244dd3fcf1a.yaml 22 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/notes/blueprint-support-nova-system-scope-policies-c4dbd244dd3fcf0000664000175100017510000000033315033036143032370 0ustar00mylesmyles--- features: - | Allows to use system-scoped tokens when contacting Nova. `Blueprint support-nova-system-scope-policies `__ ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/notes/bp-mutable-config-57efdd467c01aa7b.yaml0000664000175100017510000000037215033036143026004 0ustar00mylesmyles--- features: - | Masakari has been enabled for mutable config. Below option may be reloaded by sending SIGHUP to the correct process. 'retry_notification_new_status_interval' option will apply to process unfinished notifications. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/notes/bug-1685145-3d93145bfc76c660.yaml0000664000175100017510000000026015033036143023702 0ustar00mylesmyles--- fixes: - | Fixes ``/v1/`` API path which returned 404 ResourceNotFound preventing microversion discovery. `LP#1685145 `__ ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/notes/bug-1776385-0bcf0a0b3fad359e.yaml0000664000175100017510000000021015033036143024162 0ustar00mylesmyles--- fixes: - | Allows segment description to contain new line characters. 
`LP#1776385 `__ ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/notes/bug-1782517-e4dc70bad9e4e131.yaml0000664000175100017510000000026315033036143024114 0ustar00mylesmyles--- fixes: - | Fixes Masakari Engine not to try to stop an already stopped instance and fail with 409 from Nova. `LP#1782517 `__ ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/notes/bug-1856164-6601a6e6280eba4d.yaml0000664000175100017510000000025615033036143023755 0ustar00mylesmyles--- fixes: - | Adds ``reserved_host`` to all aggregates of the failing host, instead of just the first one. `LP#1856164 `__ ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/notes/bug-1859406-6b041a26acf6c7f6.yaml0000664000175100017510000000047515033036143024045 0ustar00mylesmyles--- fixes: - | Fixes Masakari Engine not to wait for timeout when it's known that the evacuation has failed. `LP#1859406 `__ (This fix has already been included in the first Victoria release, 10.0.0, but it was not mentioned in the release notes previously.) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/notes/bug-1882516-e8dc7fd2b55f065f.yaml0000664000175100017510000000023715033036143024135 0ustar00mylesmyles--- fixes: - | Fixes API microversion reporting to report the latest supported microversion. `LP#1882516 `__ ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/notes/bug-1932194-2b721860bbc26819.yaml0000664000175100017510000000026415033036143023616 0ustar00mylesmyles--- fixes: - | Fixes Masakari API to properly return error codes for invalid requests to the user instead of 500. 
`LP#1932194 `__ ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/notes/bug-1960619-4c2cc73483bdff86.yaml0000664000175100017510000000050615033036143024053 0ustar00mylesmyles--- fixes: - | Fixes an issue that could be caused by a user sending a malformed host notification missing host status. Such notification would block the host from being added back from maintenance until manual intervention or notification expiration. `LP#1960619 `__ ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/notes/bug-1980736-975ee013e4612062.yaml0000664000175100017510000000023015033036143023534 0ustar00mylesmyles--- fixes: - | Fixes "Instance stopping fails randomly due to already stopped instances". `LP#1980736 `__ ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/notes/bug-add-missing-domain-name-5181c02f3f033a22.yaml0000664000175100017510000000043415033036143027324 0ustar00mylesmyles--- prelude: > Domain name is needed when using keystone v3 to create keystone session, if not provided, InvalidInput exception will be raised. Two new options "os_user_domain_name" and "os_project_domain_name" with default value "default" are added to fix the issue. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/notes/compute-disable-reason-9570734c0bb888cf.yaml0000664000175100017510000000025215033036143026644 0ustar00mylesmyles--- features: - | Nova compute service "disable reason" is now set in case of host or process failure. It can be customised per type of failure via config. 
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/notes/compute_search-3da97e69e661a73f.yaml0000664000175100017510000000046015033036143025370 0ustar00mylesmyles--- fixes: - | Fixes validation of compute host existence from checking hypervisor list to compute service list. Since masakari needs to match nova compute service hostname with the one in pacemaker cluster and added to API for correctly processing hostmonitors failover notifications. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/notes/coordination_for_host_notification-a156ec5a5839a781.yaml0000664000175100017510000000076415033036143031445 0ustar00mylesmyles--- fixes: - | Fixes an issue which triggers two recovery workflows for the same host failure. `LP#1961110 `__ It introduces distributed lock for Masakari-api services when handle the concurrent notifications for the same host failure from multiple Masakari-hostmonitor services. To enable coordination, the user needs to set the new configuration option ``[coordination]backend_url``, which specifies the backend. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/notes/correct_response_code-df8b43a201efa1b4.yaml0000664000175100017510000000125115033036143027042 0ustar00mylesmyles--- fixes: - | Fixes `bug 1645699`_ which will return correct response codes for below apis: - POST /v1/notification - old_response: 200, new_response: 202 - DELETE /v1/notification - old_response: 404, new_response: 405 - PUT /v1/notification/ - old_response: 404, new_response: 405 - POST /v1/host - old_response: 200, new_response: 201 - DELETE /v1/host/ - old_response: 200, new_response: 204 - POST /v1/segment - old_response: 200, new_response: 201 - DELETE /v1/segment/ - old_response: 200, new_response: 204 .. 
_bug 1645699: https://bugs.launchpad.net/masakari/+bug/1645699 ././@PaxHeader0000000000000000000000000000021400000000000010212 xustar00118 path=masakari-19.1.0.dev18/releasenotes/notes/customisable-ha-enabled-instance-metadata-key-af511ea2aac96690.yaml 22 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/notes/customisable-ha-enabled-instance-metadata-key-af511ea2aac960000664000175100017510000000062315033036143032040 0ustar00mylesmyles--- features: - | Adds ``ha_enabled_instance_metadata_key`` config option to ``host_failure`` and ``instance_failure`` config groups. This option allows operators to override the default ``HA_Enabled`` instance metadata key which controls the behaviour of Masakari towards the instance. This way one can have different keys for different failure types (host vs instance failures). ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/notes/db-purge-support-7a33e2ea5d2a624b.yaml0000664000175100017510000000066215033036143025640 0ustar00mylesmyles--- features: - | Operators can now purge the soft-deleted records from the database tables. Added below command to purge the records: ``masakari-manage db purge --age_in_days --max_rows `` NOTE: ``notifications`` db records will be purged on the basis of ``update_at`` and ``status`` fields (finished, ignored, failed) as these records will not be automatically soft-deleted by the system. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/notes/deprecate-json-formatted-policy-file-57ad537ec19cc7e0.yaml0000664000175100017510000000176015033036143031540 0ustar00mylesmyles--- upgrade: - | The default value of ``[oslo_policy] policy_file`` config option has been changed from ``policy.json`` to ``policy.yaml``. 
Operators who are utilizing customized or previously generated static policy JSON files (which are not needed by default), should generate new policy files or convert them in YAML format. Use the `oslopolicy-convert-json-to-yaml `_ tool to convert a JSON to YAML formatted policy file in backward compatible way. deprecations: - | Use of JSON policy files was deprecated by the ``oslo.policy`` library during the Victoria development cycle. As a result, this deprecation is being noted in the Wallaby cycle with an anticipated future removal of support by ``oslo.policy``. As such operators will need to convert to YAML policy files. Please see the upgrade notes for details on migration of any custom policy files. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/notes/deprecate-topic-opt-af83f82143143c61.yaml0000664000175100017510000000020215033036143026046 0ustar00mylesmyles--- deprecations: - | The ``masakari_topic`` config option is now deprecated and will be removed in the Queens release. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/notes/drop-py-2-7-059d3cd5e7cb4e1a.yaml0000664000175100017510000000031615033036143024402 0ustar00mylesmyles--- upgrade: - | Python 2.7 support has been dropped. Last release of Masakari to support python 2.7 is OpenStack Train. The minimum version of Python now supported by Masakari is Python 3.6. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/notes/drop-python-38-39-deab0b81006bae48.yaml0000664000175100017510000000021715033036143025435 0ustar00mylesmyles--- upgrade: - | Support for Python 3.8 and Python 3.9 has been removed. The minimum supported version of Python is now Python 3.10. 
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/notes/enabled-to-segment-7e6184feb1e4f818.yaml0000664000175100017510000000043115033036143026035 0ustar00mylesmyles--- features: - | Sometimes, operators want to temporarily disable instance-ha function. This version adds 'enabled' to segment. If the segment 'enabled' value is set False, all notifications of this segment will be ignored and no recovery methods will execute. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/notes/evacuation_in_threads-cc9c79b10acfb5f6.yaml0000664000175100017510000000120215033036143027123 0ustar00mylesmyles--- fixes: - | Fixes `bug 1693728`_ which will fix the race condition where after evacuation of an instance to other host user might perform some actions on that instance which gives wrong instance vm_state to ConfirmEvacuationTask that results into notification failure. To fix this issue, following config option is added under ``DEFAULT`` section in 'masakari.conf' file:: [DEFAULT] host_failure_recovery_threads = 3 This config option decides the number of threads going to be used for evacuating the instances. .. _`bug 1693728`: https://bugs.launchpad.net/masakari/+bug/1693728 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/notes/failover_segment_apis-f5bea1cd6d103048.yaml0000664000175100017510000000067415033036143026771 0ustar00mylesmyles--- features: - | Added following new REST API's for masakari operators: - GET /v1/segments - Returns list of all failover segments. - GET /v1/segments/ - Returns specific failover segment with uuid. 
- POST /v1/segments - Creates a new failover segment - PUT /v1/segments/ - Updates a failover segment by uuid - DELETE /v1/segments/ - Delete a failover segment by uuid ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/notes/fix-endless-periodic-f223845f3044b166.yaml0000664000175100017510000000033315033036143026142 0ustar00mylesmyles--- fixes: - | Fixes an issue where a periodic task in Masakari Engine could loop forever querying Nova API following a failed evacuation. `LP#1897888 `__ ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/notes/fix-notification-stuck-problem-fdb84bad8641384b.yaml0000664000175100017510000000026115033036143030471 0ustar00mylesmyles--- fixes: - | Fixes an issue where failure notification stuck into running status when timeout. `LP#1996835 `__ ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/notes/host-apis-46a87fcd56d8ed30.yaml0000664000175100017510000000116115033036143024350 0ustar00mylesmyles--- features: - | Added following new REST API's for masakari operators: - GET /v1/segments//hosts - Returns list of all hosts associated with failover segment. - GET /v1/segments//hosts/ - Returns specific host from the failover segment with uuid. - POST /v1/segments//hosts - Creates a new host in failover segment - PUT /v1/segments//hosts/ - Updates a host in failover segment by uuid - DELETE /v1/segments//hosts/ - Delete a host from failover segment by uuid ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/notes/notifications-in-masakari-f5d79838fc23cb9b.yaml0000664000175100017510000000122215033036143027510 0ustar00mylesmyles--- features: - | Added support to emit event notifications whenever user interacts with Masakari restFul APIs. 
The emitted notifications are documented at `sample_payloads`_. To enable this feature one should set `driver` config option under the `oslo_messaging_notifications` section as shown below:: [oslo_messaging_notifications] driver = log Note: Possible values are `messaging`, `messagingv2`, `routing`, `log`, `test`, `noop`. Notifications can be completely disabled by setting `driver` value as `noop` .. _`sample_payloads`: https://docs.openstack.org/masakari/latest/#versioned-notifications ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/notes/notifications_apis-3c3d5055ae9c6649.yaml0000664000175100017510000000045715033036143026172 0ustar00mylesmyles--- features: - | Added following new REST API's related to notifications: - GET /v1/notifications - Returns list of all notifications. - GET /v1/notifications/ - Returns specific notification with uuid. - POST /v1/notifications - Creates a new notification. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/notes/policy-in-code-8740d51624055044.yaml0000664000175100017510000000217415033036143024602 0ustar00mylesmyles--- features: - | Masakari now support policy in code, which means if operators doesn't need to modify any of the default policy rules, they do not need a policy file. Operators can modify/generate a ``policy.yaml.sample`` file which will override specific policy rules from their defaults. Masakari is now configured to work with two oslo.policy CLI scripts that have been added: - The first of these can be called like ``oslopolicy-list-redundant --namespace masakari`` and will output a list of policy rules in policy.[json|yaml] that match the project defaults. These rules can be removed from the policy file as they have no effect there. 
- The second script can be called like ``oslopolicy-policy-generator --namespace masakari --output-file policy-merged.yaml`` and will populate the policy-merged.yaml file with the effective policy. This is the merged results of project defaults and config file overrides. NOTE: Default `policy.json` file is now removed as Masakari now uses default policies. A policy file is only needed if overriding one of the defaults. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/notes/progress-details-recovery-workflows-5b14b7b3f87374f4.yaml0000664000175100017510000000232315033036143031455 0ustar00mylesmyles--- features: - | Added support to record the recovery workflow details of the notification which will be returned in a new microversion 1.1 in `GET /notifications/{notification_id}` API. For example, GET /notifications/ response will contain `recovery_workflow_details` parameter as shown here `notification_details`_ Added a new config section in Masakari conf file for configuring the back end to be used by taskflow driver:: [taskflow] # The back end for storing recovery_workflow details of the notification. # (string value) connection = mysql+pymysql://root:admin@127.0.0.1/?charset=utf8 # Where db_name, can be a new database or you can also specify masakari # database. Operator should run `masakari-manage db sync` command to add new db tables required for storing recovery_workflow_details. Note: When you run `masakari-manage db sync`, make sure you have `notification_driver=taskflow_driver` set in masakari.conf. .. 
_`notification_details`: https://developer.openstack.org/api-ref/instance-ha/?expanded=show-notification-details-detail#show-notification-details ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/notes/recovery-method-customization-3438b0e26e322b88.yaml0000664000175100017510000000154115033036143030232 0ustar00mylesmyles--- features: - | Operator can now customize workflows to process each type of failure notifications (hosts, instance and process) as per their requirements. Added below new config section for customized recovery flow in a new conf file masakari-custom-recovery-methods.conf - [taskflow_driver_recovery_flows] Under [taskflow_driver_recovery_flows] is added below five new config options - 'instance_failure_recovery_tasks' is a dict of tasks which will recover instance failure. - 'process_failure_recovery_tasks' is a dict of tasks which will recover process failure. - 'host_auto_failure_recovery_tasks' is a dict of tasks which will recover host failure for auto recovery. - 'host_rh_failure_recovery_tasks' is a dict of tasks which will recover host failure for rh recovery on failure host. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/notes/remove-masakari-wsgi-script-bb737746a17111ab.yaml0000664000175100017510000000054115033036143027611 0ustar00mylesmyles--- upgrade: - | The WSGI script ``masakari-wsgi`` has been removed. Deployment tooling should instead reference the Python module path for the wsgi module in Masakari, ``masakari.wsgi.api:application`` if their chosen WSGI server supports this (gunicorn, uWSGI, etc.) or implement a ``.wsgi`` script themselves if not (mod_wsgi). 
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923813.0 masakari-19.1.0.dev18/releasenotes/notes/reno.cache0000664000175100017510000012221515033036145021204 0ustar00mylesmyles--- dates: - date: 1601307802 version: 10.0.0.0rc1 - date: 1727866270 version: 18.0.0 - date: 1709663124 version: xena-eom - date: 1708615790 version: 14.0.3 - date: 1709663083 version: wallaby-eom - date: 1629376491 version: 10.0.3 - date: 1666617273 version: 11.0.3 - date: 1666617303 version: 14.0.1 - date: 1665657565 version: 12.0.1 - date: 1742407163 version: wallaby-eol - date: 1742206310 version: 19.0.0.0rc1 - date: 1504797310 version: 4.0.0 - date: 1553273089 version: 7.0.0.0rc1 - date: 1547478986 version: 7.0.0.0b1 - date: 1678094020 version: 15.0.0.0rc1 - date: 1503031336 version: 4.0.0.0rc1 - date: 1704286012 version: ussuri-eol - date: 1487264265 version: 3.0.0.0rc1 - date: 1667400009 version: wallaby-em - date: 1524579108 version: 6.0.0.0b1 - date: 1742581793 version: xena-eol - date: 1670852047 version: 14.0.2 - date: 1499414032 version: 3.0.1 - date: 1528382223 version: 6.0.0.0b2 - date: 1517391266 version: 5.0.0.0b3 - date: 1554898434 version: 7.0.0 - date: 1535638117 version: 6.0.0 - date: 1663072200 version: 14.0.0.0rc1 - date: 1679487524 version: 15.0.0 - date: 1532615358 version: 6.0.0.0b3 - date: 1642586422 version: ocata-eol - date: 1619714694 version: 10.0.2 - date: 1712142199 version: 17.0.0 - date: 1612183100 version: 10.0.1 - date: 1714387628 version: zed-eom - date: 1746177590 version: 2023.2-eol - date: 1631562511 version: 12.0.0.0rc1 - date: 1569606240 version: 8.0.0.0rc1 - date: 1587656595 version: 9.0.0.0rc1 - date: 1650287553 version: victoria-em - date: 1619714672 version: 9.1.2 - date: 1583446909 version: rocky-em - date: 1665657550 version: 11.0.2 - date: 1619714650 version: 8.1.2 - date: 1648640757 version: 13.0.0 - date: 1618398193 version: 11.0.0 - date: 1629376500 version: 11.0.1 - date: 1738568964 version: 
victoria-eol - date: 1708333744 version: yoga-eom - date: 1533834547 version: 6.0.0.0rc1 - date: 1666617282 version: 12.0.2 - date: 1696417435 version: 16.0.0 - date: 1646900185 version: 13.0.0.0rc1 - date: 1612183063 version: 9.1.1 - date: 1508890074 version: newton-eol - date: 1703161720 version: train-eol - date: 1636716186 version: ussuri-em - date: 1709663040 version: victoria-eom - date: 1740059688 version: 16.0.1 - date: 1666617293 version: 13.0.2 - date: 1571229370 version: 8.0.0 - date: 1602670034 version: 10.0.0 - date: 1665657581 version: 13.0.1 - date: 1726483786 version: 18.0.0.0rc1 - date: 1612190669 version: 8.1.1 - date: 1670852028 version: 12.0.3 - date: 1710400838 version: 17.0.0.0rc1 - date: 1633518780 version: 12.0.0 - date: 1684746214 version: rocky-eol - date: 1670852037 version: 13.0.3 - date: 1621002022 version: train-em - date: 1535552526 version: 6.0.0.0rc2 - date: 1681465123 version: xena-em - date: 1664971818 version: 14.0.0 - date: 1478602125 version: 2.0.0 - date: 1733402805 version: 2023.1-eom - date: 1742583733 version: yoga-eol - date: 1605204887 version: 7.1.0 - date: 1605712800 version: stein-em - date: 1694092788 version: stein-eol - date: 1629376462 version: 9.1.3 - date: 1694786087 version: 16.0.0.0rc1 - date: 1589366586 version: 9.0.0 - date: 1616774144 version: 11.0.0.0rc1 - date: 1504795343 version: 4.0.0.0rc2 - date: 1520085102 version: 5.0.0 - date: 1605204900 version: 8.1.0 - date: 1743590981 version: 19.0.0 - date: 1605204914 version: 9.1.0 file-contents: releasenotes/notes/add-masakari-wsgi-module-a5f5a649a2ec460c.yaml: features: - "A new module, ``masakari.wsgi``, has been added as a place to gather WSGI\n\ ``application`` objects. This is intended to ease deployment by providing\n\ a consistent location for these objects. For example, if using uWSGI then\n\ instead of:\n\n.. code-block:: ini\n\n [uwsgi]\n wsgi-file = /bin/masakari-wsgi\n\ \nYou can now use:\n\n.. 
code-block:: ini\n\n [uwsgi]\n module = masakari.wsgi.api:application\n\ \nThis also simplifies deployment with other WSGI servers that expect module\n\ paths such as gunicorn.\n" releasenotes/notes/add-periodic-tasks-0c96d6f620502a75.yaml: features: - "Added _process_unfinished_notifications to process notifications\nwhich are\ \ in error or new state. This periodic task will execute at\nregular interval\ \ defined by new config option\n'process_unfinished_notifications_interval'\ \ defaults to 120 seconds. The\nnotifications which are in ‘new’ status will\ \ be picked up based on a new\nconfig option ‘retry_notification_new_status_interval’\ \ defaults\nto 60 seconds.\n\nTo change the default execution time of periodic\ \ task, following config\noption needs to be set with desirable time under 'DEFAULT'\ \ section in\n'masakari.conf' file::\n\n [DEFAULT]\n process_unfinished_notifications_interval\ \ = 120\n\nTo change the default identification time of notifications which\ \ are stuck\nin 'NEW' state, following config option needs to be set with desirable\n\ time under 'DEFAULT' section in 'masakari.conf' file::\n\n [DEFAULT]\n \ \ retry_notification_new_status_interval = 60\n" releasenotes/notes/add-upgrade-check-framework-52268130b25317ab.yaml: features: - 'New framework for ``masakari-status upgrade check`` command is added. This framework allows adding various checks which can be run before a Masakari upgrade to ensure if the upgrade can be performed safely. ' prelude: 'Added new tool ``masakari-status upgrade check``. ' upgrade: - 'Operator can now use new CLI tool ``masakari-status upgrade check`` to check if Masakari deployment can be safely upgraded from N-1 to N release. ' releasenotes/notes/add_evacuate_error_instances_conf_option-5b4d1906137395f0.yaml: features: - "Operators can decide whether error instances should be allowed for\nevacuation\ \ along with other instances from a failed source compute node\nor not. 
Added\ \ a new config option ``ignore_instances_in_error_state`` to\nachieve this.\ \ When set to True, masakari will skip the recovery of error\ninstances otherwise\ \ it will evacuate error instances as well from a failed\nsource compute node.\n\ \nTo use this feature, following config option need to be set under\n``host_failure``\ \ section in 'masakari.conf' file::\n\n [host_failure]\n ignore_instances_in_error_state\ \ = False\n\nThe default value for this config option is set to False.\n" releasenotes/notes/add_ha_enabled_config_options-54a9270a5993d20a.yaml: features: - "Operators can decide whether all instances or only those instances which\n\ contain metadata key 'HA_Enabled=True' should be allowed for evacuation\nfrom\ \ a failed source compute node. When set to True, it will evacuate all\ninstances\ \ from a failed source compute node. First preference will be\ngiven to those\ \ instances which contain 'HA_Enabled=True' metadata key,\nand then it will\ \ evacuate the remaining ones. When set to False, it will\nevacuate only those\ \ instances which contain 'HA_Enabled=True' metadata\nkey.\n\nTo use this feature,\ \ following config option need to be set under\n``host_failure`` section in\ \ 'masakari.conf' file::\n\n [host_failure]\n evacuate_all_instances =\ \ True\n" - "Operators can decide whether all instances or only those instances which\n\ contain metadata key 'HA_Enabled=True' should be taken into account to\nrecover\ \ from instance failure events. 
When set to True, it will execute\ninstance\ \ failure recovery actions for an instance irrespective of whether\nthat particular\ \ instance contains metadata key 'HA_Enabled=True' or not.\nWhen set to False,\ \ it will only execute instance failure recovery action\nfor an instance which\ \ contain metadata key 'HA_Enabled=True'.\n\nTo use this feature, following\ \ config option need to be set under\n``instance_failure`` section in 'masakari.conf'\ \ file::\n\n [instance_failure]\n process_all_instances = True\n" releasenotes/notes/add_reserved_host_to_aggregates-5f506d08354ec148.yaml: features: - "Operators can now decide based on the new config option\n'add_reserved_host_to_aggregate'\ \ whether to add or not a reserved_host\nto all host aggregates which failed\ \ compute host belongs to.\n\nTo use this feature, following config option need\ \ to be set under\n``host_failure`` section in 'masakari.conf' file::\n\n \ \ [host_failure]\n add_reserved_host_to_aggregate = True\n" releasenotes/notes/adopt-oslo-config-generator-cf2fdb17cf7f13db.yaml: other: Adopt oslo-config-generator to generate sample config files. New config options from masakari code should register with masakari/conf/opts.py. A deprecated option should add a deprecated group even if it didn't alter its group, otherwise the deprecated group will use 'DEFAULT' by default. releasenotes/notes/auto_priority_and_rh_priority_recovery_methods-b88cc00041fa2c4d.yaml: features: - 'Implemented workflow for ''auto_priority'' and ''rh_priority'' recovery methods in case of host failure recovery. Operators can set failover_segment''s recovery_method as ''auto_priority'' and ''rh_priority'' now. In case of ''auto_priority'' the ''auto'' workflow will be executed first to recover the instances from failed compute host. If ''auto'' workflow fails to recover the instances then ''reserved_host'' workflow will be tried. 
In case of ''rh_priority'' the ''reserved_host'' workflow will be executed first to recover the instances from failed compute host. If ''reserved_host'' workflow fails to recover the instances then ''auto'' workflow will be tried.' releasenotes/notes/blueprint-add-vmoves-348fd430aa936721.yaml: features: - 'It persists vm moves information for one host failure notification into the database, which could help users to insight the process or result of the host recovery workflow, such as which vms evacuated succeed or failed, and which ones are still evacuating. Added VMove API in microversion 1.3, please refer to .. _`VMoves`: https://docs.openstack.org/api-ref/instance-ha/#vmoves-vmoves ' releasenotes/notes/blueprint-support-nova-system-scope-policies-c4dbd244dd3fcf1a.yaml: features: - 'Allows to use system-scoped tokens when contacting Nova. `Blueprint support-nova-system-scope-policies `__ ' releasenotes/notes/bp-mutable-config-57efdd467c01aa7b.yaml: features: - 'Masakari has been enabled for mutable config. Below option may be reloaded by sending SIGHUP to the correct process. ''retry_notification_new_status_interval'' option will apply to process unfinished notifications. ' releasenotes/notes/bug-1685145-3d93145bfc76c660.yaml: fixes: - 'Fixes ``/v1/`` API path which returned 404 ResourceNotFound preventing microversion discovery. `LP#1685145 `__ ' releasenotes/notes/bug-1776385-0bcf0a0b3fad359e.yaml: fixes: - 'Allows segment description to contain new line characters. `LP#1776385 `__ ' releasenotes/notes/bug-1782517-e4dc70bad9e4e131.yaml: fixes: - 'Fixes Masakari Engine not to try to stop an already stopped instance and fail with 409 from Nova. `LP#1782517 `__ ' releasenotes/notes/bug-1856164-6601a6e6280eba4d.yaml: fixes: - 'Adds ``reserved_host`` to all aggregates of the failing host, instead of just the first one. 
`LP#1856164 `__ ' releasenotes/notes/bug-1859406-6b041a26acf6c7f6.yaml: fixes: - 'Fixes Masakari Engine not to wait for timeout when it''s known that the evacuation has failed. `LP#1859406 `__ (This fix has already been included in the first Victoria release, 10.0.0, but it was not mentioned in the release notes previously.) ' releasenotes/notes/bug-1882516-e8dc7fd2b55f065f.yaml: fixes: - 'Fixes API microversion reporting to report the latest supported microversion. `LP#1882516 `__ ' releasenotes/notes/bug-1932194-2b721860bbc26819.yaml: fixes: - 'Fixes Masakari API to properly return error codes for invalid requests to the user instead of 500. `LP#1932194 `__ ' releasenotes/notes/bug-1960619-4c2cc73483bdff86.yaml: fixes: - 'Fixes an issue that could be caused by a user sending a malformed host notification missing host status. Such notification would block the host from being added back from maintenance until manual intervention or notification expiration. `LP#1960619 `__ ' releasenotes/notes/bug-1980736-975ee013e4612062.yaml: fixes: - 'Fixes "Instance stopping fails randomly due to already stopped instances". `LP#1980736 `__ ' releasenotes/notes/bug-add-missing-domain-name-5181c02f3f033a22.yaml: prelude: 'Domain name is needed when using keystone v3 to create keystone session, if not provided, InvalidInput exception will be raised. Two new options "os_user_domain_name" and "os_project_domain_name" with default value "default" are added to fix the issue. ' releasenotes/notes/compute-disable-reason-9570734c0bb888cf.yaml: features: - 'Nova compute service "disable reason" is now set in case of host or process failure. It can be customised per type of failure via config. ' releasenotes/notes/compute_search-3da97e69e661a73f.yaml: fixes: - 'Fixes validation of compute host existence from checking hypervisor list to compute service list. 
Since masakari needs to match nova compute service hostname with the one in pacemaker cluster and added to API for correctly processing hostmonitors failover notifications. ' releasenotes/notes/coordination_for_host_notification-a156ec5a5839a781.yaml: fixes: - 'Fixes an issue which triggers two recovery workflows for the same host failure. `LP#1961110 `__ It introduces distributed lock for Masakari-api services when handle the concurrent notifications for the same host failure from multiple Masakari-hostmonitor services. To enable coordination, the user needs to set the new configuration option ``[coordination]backend_url``, which specifies the backend. ' releasenotes/notes/correct_response_code-df8b43a201efa1b4.yaml: fixes: - "Fixes `bug 1645699`_ which will return correct response codes for below\napis:\n\ \n- POST /v1/notification - old_response: 200, new_response: 202\n- DELETE\ \ /v1/notification - old_response: 404, new_response: 405\n- PUT /v1/notification/\ \ - old_response: 404,\n new_response: 405\n- POST /v1/host - old_response:\ \ 200, new_response: 201\n- DELETE /v1/host/ - old_response: 200,\ \ new_response: 204\n- POST /v1/segment - old_response: 200, new_response:\ \ 201\n- DELETE /v1/segment/ - old_response: 200, new_response:\ \ 204\n\n.. _bug 1645699: https://bugs.launchpad.net/masakari/+bug/1645699\n" releasenotes/notes/customisable-ha-enabled-instance-metadata-key-af511ea2aac96690.yaml: features: - 'Adds ``ha_enabled_instance_metadata_key`` config option to ``host_failure`` and ``instance_failure`` config groups. This option allows operators to override the default ``HA_Enabled`` instance metadata key which controls the behaviour of Masakari towards the instance. This way one can have different keys for different failure types (host vs instance failures). 
' releasenotes/notes/db-purge-support-7a33e2ea5d2a624b.yaml: features: - "Operators can now purge the soft-deleted records from the database tables.\n\ Added below command to purge the records:\n\n ``masakari-manage db purge --age_in_days\ \ --max_rows ``\n\nNOTE: ``notifications`` db records will be purged\ \ on the basis of ``update_at``\nand ``status`` fields (finished, ignored, failed)\ \ as these records will not be\nautomatically soft-deleted by the system.\n" releasenotes/notes/deprecate-json-formatted-policy-file-57ad537ec19cc7e0.yaml: deprecations: - 'Use of JSON policy files was deprecated by the ``oslo.policy`` library during the Victoria development cycle. As a result, this deprecation is being noted in the Wallaby cycle with an anticipated future removal of support by ``oslo.policy``. As such operators will need to convert to YAML policy files. Please see the upgrade notes for details on migration of any custom policy files. ' upgrade: - 'The default value of ``[oslo_policy] policy_file`` config option has been changed from ``policy.json`` to ``policy.yaml``. Operators who are utilizing customized or previously generated static policy JSON files (which are not needed by default), should generate new policy files or convert them in YAML format. Use the `oslopolicy-convert-json-to-yaml `_ tool to convert a JSON to YAML formatted policy file in backward compatible way. ' releasenotes/notes/deprecate-topic-opt-af83f82143143c61.yaml: deprecations: - 'The ``masakari_topic`` config option is now deprecated and will be removed in the Queens release. ' releasenotes/notes/drop-py-2-7-059d3cd5e7cb4e1a.yaml: upgrade: - 'Python 2.7 support has been dropped. Last release of Masakari to support python 2.7 is OpenStack Train. The minimum version of Python now supported by Masakari is Python 3.6. ' releasenotes/notes/drop-python-38-39-deab0b81006bae48.yaml: upgrade: - 'Support for Python 3.8 and Python 3.9 has been removed. 
The minimum supported version of Python is now Python 3.10. ' releasenotes/notes/enabled-to-segment-7e6184feb1e4f818.yaml: features: - 'Sometimes, operators want to temporarily disable instance-ha function. This version adds ''enabled'' to segment. If the segment ''enabled'' value is set False, all notifications of this segment will be ignored and no recovery methods will execute. ' releasenotes/notes/evacuation_in_threads-cc9c79b10acfb5f6.yaml: fixes: - "Fixes `bug 1693728`_ which will fix the race condition where after\nevacuation\ \ of an instance to other host user might perform some actions on\nthat instance\ \ which gives wrong instance vm_state to ConfirmEvacuationTask\nthat results\ \ into notification failure.\n\nTo fix this issue, following config option is\ \ added under ``DEFAULT``\nsection in 'masakari.conf' file::\n\n [DEFAULT]\n\ \ host_failure_recovery_threads = 3\n\nThis config option decides the number\ \ of threads going to be used for\nevacuating the instances.\n\n.. _`bug 1693728`:\ \ https://bugs.launchpad.net/masakari/+bug/1693728\n" releasenotes/notes/failover_segment_apis-f5bea1cd6d103048.yaml: features: - 'Added following new REST API''s for masakari operators - 1. GET /v1/segments Returns list of all failover segments. 2. GET /v1/segments/ Returns specific failover segment with uuid. 3. POST /v1/segments Creates a new failover segment 4. PUT /v1/segments/ Updates a failover segment by uuid 5. DELETE /v1/segments/ Delete a failover segment by uuid' releasenotes/notes/fix-endless-periodic-f223845f3044b166.yaml: fixes: - 'Fixes an issue where a periodic task in Masakari Engine could loop forever querying Nova API following a failed evacuation. `LP#1897888 `__ ' releasenotes/notes/fix-notification-stuck-problem-fdb84bad8641384b.yaml: fixes: - 'Fixes an issue where failure notification stuck into running status when timeout. 
`LP#1996835 `__ ' releasenotes/notes/host-apis-46a87fcd56d8ed30.yaml: features: - 'Added following new REST API''s for masakari operators - 1. GET /v1/segments//hosts Returns list of all hosts associated with failover segment. 2. GET /v1/segments//hosts/ Returns specific host from the failover segment with uuid. 3. POST /v1/segments//hosts Creates a new host in failover segment 4. PUT /v1/segments//hosts/ Updates a host in failover segment by uuid 5. DELETE /v1/segments//hosts/ Delete a host from failover segment by uuid' releasenotes/notes/notifications-in-masakari-f5d79838fc23cb9b.yaml: features: - "Added support to emit event notifications whenever user interacts with\nMasakari\ \ restFul APIs. The emitted notifications are documented at\n`sample_payloads`_.\n\ \nTo enable this feature one should set `driver` config option under the\n`oslo_messaging_notifications`\ \ section as shown below::\n\n [oslo_messaging_notifications]\n driver = log\n\ \nNote: Possible values are `messaging`, `messagingv2`, `routing`, `log`,\n\ `test`, `noop`.\nNotifications can be completely disabled by setting `driver`\ \ value as `noop`\n\n.. _`sample_payloads`: https://docs.openstack.org/masakari/latest/#versioned-notifications\n" releasenotes/notes/notifications_apis-3c3d5055ae9c6649.yaml: features: - 'Added following new REST API''s related to notifications - 1. GET /v1/notifications Returns list of all notifications. 2. GET /v1/notifications/ Returns specific notification with uuid. 3. POST /v1/notifications Creates a new notification.' 
releasenotes/notes/policy-in-code-8740d51624055044.yaml: features: - "Masakari now support policy in code, which means if operators doesn't need\ \ to\nmodify any of the default policy rules, they do not need a policy file.\n\ Operators can modify/generate a ``policy.yaml.sample`` file which will override\n\ specific policy rules from their defaults.\n\nMasakari is now configured to\ \ work with two oslo.policy CLI scripts that\nhave been added:\n\n- The first\ \ of these can be called like\n ``oslopolicy-list-redundant --namespace masakari``\ \ and will output a list of\n policy rules in policy.[json|yaml] that match\ \ the project defaults. These\n rules can be removed from the policy file as\ \ they have no effect there.\n- The second script can be called like\n ``oslopolicy-policy-generator\ \ --namespace masakari --output-file policy-merged.yaml``\n and will populate\ \ the policy-merged.yaml file with the effective policy.\n This is the merged\ \ results of project defaults and config file overrides.\n\nNOTE: Default `policy.json`\ \ file is now removed as Masakari now uses default\npolicies. 
A policy file\ \ is only needed if overriding one of the defaults.\n" releasenotes/notes/progress-details-recovery-workflows-5b14b7b3f87374f4.yaml: features: - "Added support to record the recovery workflow details of the notification\n\ which will be returned in a new microversion 1.1 in\n`GET /notifications/{notification_id}`\ \ API.\n\nFor example, GET /notifications/ response will\ \ contain\n`recovery_workflow_details` parameter as shown here `notification_details`_\n\ \nAdded a new config section in Masakari conf file for configuring the back\n\ end to be used by taskflow driver::\n\n [taskflow]\n # The back\ \ end for storing recovery_workflow details of the notification.\n #\ \ (string value)\n\n connection = mysql+pymysql://root:admin@127.0.0.1/?charset=utf8\n\n # Where db_name, can be a new database or you\ \ can also specify masakari\n # database.\n\nOperator should run `masakari-manage\ \ db sync` command to add new db tables\nrequired for storing recovery_workflow_details.\n\ \nNote: When you run `masakari-manage db sync`, make sure you have\n`notification_driver=taskflow_driver`\ \ set in masakari.conf.\n\n.. 
_`notification_details`: https://developer.openstack.org/api-ref/instance-ha/?expanded=show-notification-details-detail#show-notification-details\n" releasenotes/notes/recovery-method-customization-3438b0e26e322b88.yaml: features: - "Operator can now customize workflows to process each type of failure\nnotifications\ \ (hosts, instance and process) as per their requirements.\nAdded below new\ \ config section for customized recovery flow in a new conf\nfile masakari-custom-recovery-methods.conf\n\ \n- [taskflow_driver_recovery_flows]\n\nUnder [taskflow_driver_recovery_flows]\ \ is added below five new config options\n\n- 'instance_failure_recovery_tasks'\ \ is a dict of tasks which will recover\n instance failure.\n- 'process_failure_recovery_tasks'\ \ is a dict of tasks which will recover\n process failure.\n- 'host_auto_failure_recovery_tasks'\ \ is a dict of tasks which will recover\n host failure for auto recovery.\n\ - 'host_rh_failure_recovery_tasks' is a dict of tasks which will recover\n \ \ host failure for rh recovery on failure host.\n" releasenotes/notes/remove-masakari-wsgi-script-bb737746a17111ab.yaml: upgrade: - 'The WSGI script ``masakari-wsgi`` has been removed. Deployment tooling should instead reference the Python module path for the wsgi module in Masakari, ``masakari.wsgi.api:application`` if their chosen WSGI server supports this (gunicorn, uWSGI, etc.) or implement a ``.wsgi`` script themselves if not (mod_wsgi). ' releasenotes/notes/reserved_host_recovery_method-d2de1f205136c8d5.yaml: features: - 'Implemented workflow for ''reserved_host'' recovery method in case of host failure. Now operator can create or update failover segment with ''reserved_host'' recovery method along with the existing ''auto'' method. When ''reserved_host'' recovery_method is set to a failover segment, operators should also add one or more hosts with reserved flag set as True. 
' releasenotes/notes/switch-to-alembic-b438de67c5b22a40.yaml: upgrade: - "The database migration engine has changed from `sqlalchemy-migrate`__ to\n\ `alembic`__. For most deployments, this should have minimal to no impact\nand\ \ the switch should be mostly transparent. The main user-facing impact is\n\ the change in schema versioning. While sqlalchemy-migrate used a linear,\ninteger-based\ \ versioning scheme, which required placeholder migrations to\nallow for potential\ \ migration backports, alembic uses a distributed version\ncontrol-like schema\ \ where a migration's ancestor is encoded in the file and\nbranches are possible.\ \ The alembic migration files therefore use a\narbitrary UUID-like naming scheme\ \ and the ``masakari-manage db_sync``\ncommand now expects such an version when\ \ manually specifying the version\nthat should be applied. For example::\n\n\ \ $ masakari-manage db sync c6214ca60943\n\nAttempting to specify an sqlalchemy-migrate-based\ \ version will result in an\nerror.\n\n.. __: https://sqlalchemy-migrate.readthedocs.io/en/latest/\n\ .. __: https://alembic.sqlalchemy.org/en/latest/\n" releasenotes/notes/wsgi-applications-3ed7d6b89f1a5785.yaml: upgrade: - 'WSGI application script ``masakari-wsgi`` is now available. It allows running the masakari APIs using a WSGI server of choice (for example nginx and uwsgi, apache2 with mod_proxy_uwsgi or gunicorn). The eventlet-based servers are still available, but the WSGI options will allow greater deployment flexibility. 
' notes: - files: - - releasenotes/notes/add-masakari-wsgi-module-a5f5a649a2ec460c.yaml - !!binary | MzAwZmE3MzJkYzI3ZmRiM2FmMGQ4N2I0MDQ5ZWY1OWM1YWE4YjkyOA== - - releasenotes/notes/drop-python-38-39-deab0b81006bae48.yaml - !!binary | YzM5YjliZDdmZGM2NmU2ODQ1NjcyMjRmOWNkNjQ2MzRjODJkOTY3MA== - - releasenotes/notes/remove-masakari-wsgi-script-bb737746a17111ab.yaml - !!binary | Y2NhZWNkMzI1Zjg2YjQ2Zjg2Mjc0ZmRlYWZhNjYxYWVlNDM2YTQzNA== version: 19.0.0-12 - files: - - releasenotes/notes/adopt-oslo-config-generator-cf2fdb17cf7f13db.yaml - !!binary | Mjc5NzdjMTBlZWQyNGZiMzI3YTFlMmM4NmZkMTJlNzI2MTMwODNjMA== - - releasenotes/notes/failover_segment_apis-f5bea1cd6d103048.yaml - !!binary | ZDkwNjU0MWUyMTI0ODE0OGMyZGUxYjc5MDAwMDZiNTE2NDU0MzI4OA== - - releasenotes/notes/host-apis-46a87fcd56d8ed30.yaml - !!binary | MmUyNjdlNjYwNDRjY2M0ZjRmZDM3Y2E1YzBmM2ZhYTg3MWZmYzNmYg== - - releasenotes/notes/notifications_apis-3c3d5055ae9c6649.yaml - !!binary | NDRmNzY5OWU0ODUxMDkxNmI4MDJiODE0YjFiMGZkN2Q2NGM4MWYzZA== version: 2.0.0 - files: - - releasenotes/notes/add-periodic-tasks-0c96d6f620502a75.yaml - !!binary | MDBiYzU2MTJhODM0ODIzZDIxMTkyYTBmYzMyOGI1M2NjZjQ0YjBiZQ== - - releasenotes/notes/add_ha_enabled_config_options-54a9270a5993d20a.yaml - !!binary | NzdmNTFjNTFhOTM3ZDBlZDlmMWYzMTUyMjU5M2JhZTI2ZjE1ZTA0YQ== - - releasenotes/notes/add_reserved_host_to_aggregates-5f506d08354ec148.yaml - !!binary | ZDQ1Zjc1NGNiYjZjYTczZWJlY2NmODk2YjhhNDhiZDQ2ODJlMWNkYw== - - releasenotes/notes/correct_response_code-df8b43a201efa1b4.yaml - !!binary | NzdmNTFjNTFhOTM3ZDBlZDlmMWYzMTUyMjU5M2JhZTI2ZjE1ZTA0YQ== - - releasenotes/notes/reserved_host_recovery_method-d2de1f205136c8d5.yaml - !!binary | M2YyNTRlMzJhOTQ1Yjk1MDQ3MTdhMWY5MWM1ZWFhMjRiMTU0ZGY1Mg== version: 3.0.0.0rc1 - files: - - releasenotes/notes/add_evacuate_error_instances_conf_option-5b4d1906137395f0.yaml - !!binary | YTY3ZTBjNTAzODVhMTg5MDA2NTY4ODE3ZmYxMTJjMjg2YTAwNDhhYQ== - - 
releasenotes/notes/auto_priority_and_rh_priority_recovery_methods-b88cc00041fa2c4d.yaml - !!binary | NDg2MTQxMzY4MmQ2MjIwN2UzNDM5M2M2ZjU2MjlmZTY2YzYyNjE3OQ== - - releasenotes/notes/bug-add-missing-domain-name-5181c02f3f033a22.yaml - !!binary | NjEzOWRhMjhkYTJhZGMyNTg2YjM0ZmY5YTZhOWNiNTYxYjdmNDBmYg== - - releasenotes/notes/deprecate-topic-opt-af83f82143143c61.yaml - !!binary | NjIxMjhlY2Y0YjExMWY1ZGI5ZDBlODFlOTU4ZTkyNDIyMDg1NmExNA== - - releasenotes/notes/evacuation_in_threads-cc9c79b10acfb5f6.yaml - !!binary | MjVkMzNkMmNiMWVlYzI3MTIyNzMwOWEzNGE0NWIwZDNmMDk4N2Q1MA== version: 4.0.0 - files: - - releasenotes/notes/bp-mutable-config-57efdd467c01aa7b.yaml - !!binary | NDI5OWIzODg4M2M3YzcxMWZmM2UzNDlmNWIxMzRiNmM5YTI3MmNhZg== - - releasenotes/notes/db-purge-support-7a33e2ea5d2a624b.yaml - !!binary | NDA0OGIxZmQ4ZWFlMDY1NjUyMTA1ZDE5ODkyMDcxYjBhNGZhNTUzMw== - - releasenotes/notes/policy-in-code-8740d51624055044.yaml - !!binary | ZDc1OTJjYmUyNTRlOTMzN2VkMDhkNTQ1NGQwZmJkODg0MDRkZGMwZA== - - releasenotes/notes/recovery-method-customization-3438b0e26e322b88.yaml - !!binary | YWQzZGM3MzdjOTg0YzI2Nzk4MGU3NDc5YWNjMmJmODg1NmI1NTZkNQ== - - releasenotes/notes/wsgi-applications-3ed7d6b89f1a5785.yaml - !!binary | NWJiZDc4ZTMyNmU3NzI2MjI5YmI5NGY4ODdmMThmOGIyN2JiN2ExNA== version: 6.0.0 - files: - - releasenotes/notes/add-upgrade-check-framework-52268130b25317ab.yaml - !!binary | NzcwMGNkZDNiZjRjNDdjOTdiODU4NzQyOGMzNTIzOTAyNzE2OTJiMg== - - releasenotes/notes/notifications-in-masakari-f5d79838fc23cb9b.yaml - !!binary | MDYxNmI0YmQ5OTI3MGM5YjRiYjFkMWRmMTdmYWQxY2IzYTUwNDIxYg== - - releasenotes/notes/progress-details-recovery-workflows-5b14b7b3f87374f4.yaml - !!binary | NzMyMWVlMzJmZGM0N2M0MDhhMTMxMjBiMWQ3NmYxMTg1OTE5MGIyZQ== version: 7.0.0 - files: - - releasenotes/notes/bug-1685145-3d93145bfc76c660.yaml - !!binary | MzI3MWE3Yzc0NDZjYjMyYmQzOTc1YjRhMGQ1MGFjZWVjMWExMjI4ZQ== - - releasenotes/notes/bug-1776385-0bcf0a0b3fad359e.yaml - !!binary | 
OTI4MDU2MDczMWM5MWNmYmY1MmMwYWZkNTQ2NTM0YmExMTNlODNlNQ== - - releasenotes/notes/bug-1856164-6601a6e6280eba4d.yaml - !!binary | YmJlYWQ4NTI0N2JkYzk3MDU1NWIyNTc5ZGNkM2Q0Y2IzYTNjY2ViYg== - - releasenotes/notes/bug-1882516-e8dc7fd2b55f065f.yaml - !!binary | MzI3MWE3Yzc0NDZjYjMyYmQzOTc1YjRhMGQ1MGFjZWVjMWExMjI4ZQ== - - releasenotes/notes/compute_search-3da97e69e661a73f.yaml - !!binary | OWVmZjA4YzdhODA0MWMyOGE1ZjljNWVjYjJiOTkxNWU3ZjQyY2U4Yw== version: 8.1.2 - files: - - releasenotes/notes/bug-1782517-e4dc70bad9e4e131.yaml - !!binary | ZTkxNWFkZTg1YWQyN2E5ZGU3MGQ1OTM3N2E5YTI1NWYyMWM2YzhkZA== - - releasenotes/notes/bug-1859406-6b041a26acf6c7f6.yaml - !!binary | ZTkxNWFkZTg1YWQyN2E5ZGU3MGQ1OTM3N2E5YTI1NWYyMWM2YzhkZA== - - releasenotes/notes/fix-endless-periodic-f223845f3044b166.yaml - !!binary | MzE1MTEzZDk3NDk4MDU2OTgwMDM4OTJkYmFhNDJkZWJiYzU0OTM1NA== version: 8.1.1 - files: - - releasenotes/notes/bug-1932194-2b721860bbc26819.yaml - !!binary | Yjk2NWYzYTI1M2U3ZWNhMDhhYzc2NWJlOTBiN2MxYWUzZDg2MzY5Zg== version: 9.1.3 - files: - - releasenotes/notes/bug-1685145-3d93145bfc76c660.yaml - !!binary | YWFiZGNkYTk4YmViNDlmZmYxZjhhZGU4YmYyZmUzYzIwMWY0YTA1Mg== - - releasenotes/notes/bug-1776385-0bcf0a0b3fad359e.yaml - !!binary | YjVlY2ZkZTZkMmJkMDQzMTg4ZTIyMGNkYzg4ZmQ5NmZlOGU5M2E3Zg== - - releasenotes/notes/bug-1856164-6601a6e6280eba4d.yaml - !!binary | MzE0MDI2Y2UxNTZhZjdiNGQ2NzRkOTk0N2U3ZDY0ODUwMjA4Mjg5Mw== - - releasenotes/notes/bug-1882516-e8dc7fd2b55f065f.yaml - !!binary | YWFiZGNkYTk4YmViNDlmZmYxZjhhZGU4YmYyZmUzYzIwMWY0YTA1Mg== - - releasenotes/notes/compute_search-3da97e69e661a73f.yaml - !!binary | MzU1MTljMGNlMDIwOTJhYWVmNmI4MDI4MTdiNzRkMDE3YTg0YjA4Yg== version: 9.1.2 - files: - - releasenotes/notes/bug-1782517-e4dc70bad9e4e131.yaml - !!binary | NDU4Njk4ZmY3NDIwNWFmMjljMjJmZDdmMjBmODcwOTE2ZWFmMWIxNg== - - releasenotes/notes/bug-1859406-6b041a26acf6c7f6.yaml - !!binary | NDU4Njk4ZmY3NDIwNWFmMjljMjJmZDdmMjBmODcwOTE2ZWFmMWIxNg== - - 
releasenotes/notes/fix-endless-periodic-f223845f3044b166.yaml - !!binary | MzIyYmFhYTRjMjNjYjc3MzhhOWM4MjYwYTMzMzYwN2ViMjBiNTg0Yw== version: 9.1.1 - files: - - releasenotes/notes/drop-py-2-7-059d3cd5e7cb4e1a.yaml - !!binary | ZDQ2ZWE2YjExMDMyZWI0NjI4ZDhjZjljNzUzM2UxMmQ4MDRhODAwNA== version: 9.0.0 - files: - - releasenotes/notes/bug-1932194-2b721860bbc26819.yaml - !!binary | OWExMWNkM2Q2MzQyZDU3NmI5MjIxYTFlNGY0OWFiZGYwNWFlNzdhOA== version: 10.0.3 - files: - - releasenotes/notes/bug-1685145-3d93145bfc76c660.yaml - !!binary | NjRkYjBmZTg3YzZkOTc5NzhjNDYwMzMyMTJkZjA0ODhhYzdjZmVkOQ== - - releasenotes/notes/bug-1776385-0bcf0a0b3fad359e.yaml - !!binary | OWFmNTg0YzVjOTJhZGFmOWIxNzIzNTFhZGE5YmZhZWQyNGQ0ZWYzNg== - - releasenotes/notes/bug-1856164-6601a6e6280eba4d.yaml - !!binary | YjZlMjkxNjFiZTQ4MWY4NDMzNGNkZGE4Y2E1YjU2MWJkNDc2M2U5ZA== - - releasenotes/notes/bug-1882516-e8dc7fd2b55f065f.yaml - !!binary | NjRkYjBmZTg3YzZkOTc5NzhjNDYwMzMyMTJkZjA0ODhhYzdjZmVkOQ== version: 10.0.2 - files: - - releasenotes/notes/bug-1782517-e4dc70bad9e4e131.yaml - !!binary | ZGEyYTVmMDE1MzRkMjI4MzExZjVkNWZhN2IzMWM5OTE3ODcwYTU2Mw== - - releasenotes/notes/bug-1859406-6b041a26acf6c7f6.yaml - !!binary | ZGEyYTVmMDE1MzRkMjI4MzExZjVkNWZhN2IzMWM5OTE3ODcwYTU2Mw== - - releasenotes/notes/fix-endless-periodic-f223845f3044b166.yaml - !!binary | ODAyZDg4YjI4YTFlYmY4OTY2ZjQ1YTdmZjM5YzY4NjgzNmJhMDY3Yw== version: 10.0.1 - files: - - releasenotes/notes/compute_search-3da97e69e661a73f.yaml - !!binary | NDMyMjk2OGI4OTNiMjQyZjIyOTkxMmMyYjcwZTM4OTVmMDIyNzQwMg== - - releasenotes/notes/customisable-ha-enabled-instance-metadata-key-af511ea2aac96690.yaml - !!binary | NDM5NzA4OGRhNzFkM2E1ZDc1YTYxYjQ0ZjBlYzRlMGU3MTFjYmU5Nw== version: 10.0.0 - files: - - releasenotes/notes/bug-1960619-4c2cc73483bdff86.yaml - !!binary | ZTNmNmU3ODg0NzU2ODEyMTUyOWQyNGQxZmQ3MWIxNjZjZWU5NjVhZg== version: 11.0.3 - files: - - releasenotes/notes/bug-1980736-975ee013e4612062.yaml - !!binary | 
MTY4MGE3MzIwYTM2ZmFhNDYzZDNlNmIwOThiMjZmNDc4YTMyNDk5Ng== version: 11.0.2 - files: - - releasenotes/notes/bug-1932194-2b721860bbc26819.yaml - !!binary | ZTNjYmNkMjViOWEzMTI4NjkwMWVjMzE1OWVjYzg1NjM3MTgzMDIzYw== version: 11.0.1 - files: - - releasenotes/notes/blueprint-support-nova-system-scope-policies-c4dbd244dd3fcf1a.yaml - !!binary | MGE1YWU4YjQwMjY3ZGMzNTNkM2E0ZTNkMTc1NGQzNmU3N2JmMGYwNg== - - releasenotes/notes/bug-1685145-3d93145bfc76c660.yaml - !!binary | MzA4NDJmYWVhYTUyZDQwZjZhMDA3NjVlNzk3ZTNhMTczYTE1ZjQ0MQ== - - releasenotes/notes/bug-1776385-0bcf0a0b3fad359e.yaml - !!binary | ZDdmNDdmMjYyNzk5YjI4NTA4MGNiMDg4MzA1NjA4OTFjNTMyYTVhMQ== - - releasenotes/notes/bug-1782517-e4dc70bad9e4e131.yaml - !!binary | N2Y3NjA4MWNjZjE2NzE0MWJlMDdhMWZhMWI0NmI5MmQyZGEyY2VlMQ== - - releasenotes/notes/bug-1856164-6601a6e6280eba4d.yaml - !!binary | ZDE4NTBkZjY3NGFhMzM5MDQxM2VhZDliMDdmYWEyYmE1ZDdkYmVhNA== - - releasenotes/notes/bug-1859406-6b041a26acf6c7f6.yaml - !!binary | N2Y3NjA4MWNjZjE2NzE0MWJlMDdhMWZhMWI0NmI5MmQyZGEyY2VlMQ== - - releasenotes/notes/bug-1882516-e8dc7fd2b55f065f.yaml - !!binary | MzA4NDJmYWVhYTUyZDQwZjZhMDA3NjVlNzk3ZTNhMTczYTE1ZjQ0MQ== - - releasenotes/notes/deprecate-json-formatted-policy-file-57ad537ec19cc7e0.yaml - !!binary | NTEzYzI5MDc4ZmZhNTJmNTBiYzc1NTI4Yzg5MWVlZjJlNzdlZGE5OQ== - - releasenotes/notes/enabled-to-segment-7e6184feb1e4f818.yaml - !!binary | ZmU4OGVhZTljYmVhZDA3OWZkNGQxOGNmNzk4OTBlMzA4YmZiYzEzMw== - - releasenotes/notes/fix-endless-periodic-f223845f3044b166.yaml - !!binary | ZGY2MzcxNGIwM2NlZjU5MDY3OTFmMTRiYmYyNDE2YmVhYjIzY2EwYQ== version: 11.0.0 - files: - - releasenotes/notes/fix-notification-stuck-problem-fdb84bad8641384b.yaml - !!binary | NGY2YTc0MGMyYzQwNzI5MTIyNDQ2YTI3NjYxMjczNjM3OTlkZjJiMA== version: 12.0.3 - files: - - releasenotes/notes/bug-1960619-4c2cc73483bdff86.yaml - !!binary | YTViNzU2NGY0NDUzZTdlYzdkZjQwMjFhODUyZWI0MGIwYmQ2MzFjYQ== version: 12.0.2 - files: - - releasenotes/notes/bug-1980736-975ee013e4612062.yaml - !!binary | 
ZWJmYmQ2ZGU0MzU3Yjg5ZDgzNmU5ZmRiMWJiM2IzNTY1ZGNlMDIzYg== version: 12.0.1 - files: - - releasenotes/notes/bug-1932194-2b721860bbc26819.yaml - !!binary | NWM2ZmQ0NDUwNGU0N2RkYmVjM2UwYzI5ZDQ1NGRhZWRhODdmODZhNA== - - releasenotes/notes/compute-disable-reason-9570734c0bb888cf.yaml - !!binary | Yzg2MTQzN2I1MmNhMGM0MTVjNDIzOWRiZjg2ZGEyNTg3N2UwOTU0ZQ== version: 12.0.0 - files: - - releasenotes/notes/fix-notification-stuck-problem-fdb84bad8641384b.yaml - !!binary | OWQxYjllM2U5YmQyZDg4NzQ5ZWFlMjc5ZDY1MTQ5MzAxZjNkYjVhOQ== version: 13.0.3 - files: - - releasenotes/notes/bug-1960619-4c2cc73483bdff86.yaml - !!binary | Zjg0NGI4ZjQ4ZTc1YzA4NTNiMTkzMjRkMDAwYTAyMWM1ZjAyZDM4NQ== version: 13.0.2 - files: - - releasenotes/notes/bug-1980736-975ee013e4612062.yaml - !!binary | ZjJjNGFhZGE3OWI0NTU4MzZhYzczM2M0MWRhZWE0NzRkYzU5NzllMw== version: 13.0.1 - files: - - releasenotes/notes/fix-notification-stuck-problem-fdb84bad8641384b.yaml - !!binary | ZTQ5NmFhZDBlY2M4NGJlYjNiMDAwMDE5YmY0NWIyNDRlNmJmNWZhNg== version: 14.0.2 - files: - - releasenotes/notes/bug-1960619-4c2cc73483bdff86.yaml - !!binary | YzgxM2ZhOTYyMmQ0ZjQxZDhkZTU0MjIzNzM2OGE1ZjM0ZTZiYzM3MQ== version: 14.0.1 - files: - - releasenotes/notes/bug-1980736-975ee013e4612062.yaml - !!binary | NzI0MTEwMTE2MzU1MGIzY2Q5ODEyNjI0NjhkYzUyM2NjYjI1Y2M5ZQ== - - releasenotes/notes/coordination_for_host_notification-a156ec5a5839a781.yaml - !!binary | OWZlZjg4MDdjZmViNzkzZTgzODcyN2MyMjdlM2FjYTRmZWU5YWE0ZA== version: 14.0.0 - files: - - releasenotes/notes/bug-1960619-4c2cc73483bdff86.yaml - !!binary | MDgzN2Q3Nzg3YzFiZTM1ZGY0YjQzMzBkYmU2Y2RkMmMxNjBhNjlhYg== - - releasenotes/notes/fix-notification-stuck-problem-fdb84bad8641384b.yaml - !!binary | N2VjM2VkZGExYWRhOWMyNDY0ZDc5Yzg0YjBmZDFkMWJlMjJmOTMzNg== version: 15.0.0 - files: - - releasenotes/notes/blueprint-add-vmoves-348fd430aa936721.yaml - !!binary | MTdlYmFkNTM5NzlmZjU5MGVmNzBjNjczNjZkNGVhM2FiNzFkMDg5Ng== version: 16.0.0 - files: - - releasenotes/notes/switch-to-alembic-b438de67c5b22a40.yaml - 
!!binary | NjgyM2VhNWVkMWNlYjBhNGI2OWJmNDVmMDU2Njk2MmM2OGQ2MzQxZg== version: 17.0.0 - files: - - releasenotes/notes/add-masakari-wsgi-module-a5f5a649a2ec460c.yaml - !!binary | MGZkMzRkZDZhNmQ5MDUyNWRiZjgwNmYzNTU3N2M1ZWUxZDdlOTQ0NA== version: 19.0.0-3 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/notes/reserved_host_recovery_method-d2de1f205136c8d5.yaml0000664000175100017510000000061015033036143030472 0ustar00mylesmyles--- features: - | Implemented workflow for 'reserved_host' recovery method in case of host failure. Now operator can create or update failover segment with 'reserved_host' recovery method along with the existing 'auto' method. When 'reserved_host' recovery_method is set to a failover segment, operators should also add one or more hosts with reserved flag set as True. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/notes/switch-to-alembic-b438de67c5b22a40.yaml0000664000175100017510000000210215033036143025650 0ustar00mylesmyles--- upgrade: - | The database migration engine has changed from `sqlalchemy-migrate`__ to `alembic`__. For most deployments, this should have minimal to no impact and the switch should be mostly transparent. The main user-facing impact is the change in schema versioning. While sqlalchemy-migrate used a linear, integer-based versioning scheme, which required placeholder migrations to allow for potential migration backports, alembic uses a distributed version control-like schema where a migration's ancestor is encoded in the file and branches are possible. The alembic migration files therefore use a arbitrary UUID-like naming scheme and the ``masakari-manage db_sync`` command now expects such an version when manually specifying the version that should be applied. 
For example:: $ masakari-manage db sync c6214ca60943 Attempting to specify an sqlalchemy-migrate-based version will result in an error. .. __: https://sqlalchemy-migrate.readthedocs.io/en/latest/ .. __: https://alembic.sqlalchemy.org/en/latest/ ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/notes/wsgi-applications-3ed7d6b89f1a5785.yaml0000664000175100017510000000053615033036143026032 0ustar00mylesmyles--- upgrade: - | WSGI application script ``masakari-wsgi`` is now available. It allows running the masakari APIs using a WSGI server of choice (for example nginx and uwsgi, apache2 with mod_proxy_uwsgi or gunicorn). The eventlet-based servers are still available, but the WSGI options will allow greater deployment flexibility. ././@PaxHeader0000000000000000000000000000003200000000000010210 xustar0026 mtime=1751923813.53571 masakari-19.1.0.dev18/releasenotes/source/0000775000175100017510000000000015033036146017422 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/source/2023.1.rst0000664000175100017510000000021015033036143020667 0ustar00mylesmyles=========================== 2023.1 Series Release Notes =========================== .. release-notes:: :branch: unmaintained/2023.1 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/source/2023.2.rst0000664000175100017510000000020215033036143020671 0ustar00mylesmyles=========================== 2023.2 Series Release Notes =========================== .. 
release-notes:: :branch: stable/2023.2 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/source/2024.1.rst0000664000175100017510000000020215033036143020671 0ustar00mylesmyles=========================== 2024.1 Series Release Notes =========================== .. release-notes:: :branch: stable/2024.1 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/source/2024.2.rst0000664000175100017510000000020215033036143020672 0ustar00mylesmyles=========================== 2024.2 Series Release Notes =========================== .. release-notes:: :branch: stable/2024.2 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/source/2025.1.rst0000664000175100017510000000020215033036143020672 0ustar00mylesmyles=========================== 2025.1 Series Release Notes =========================== .. 
release-notes:: :branch: stable/2025.1 ././@PaxHeader0000000000000000000000000000003200000000000010210 xustar0026 mtime=1751923813.53571 masakari-19.1.0.dev18/releasenotes/source/_static/0000775000175100017510000000000015033036146021050 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/source/_static/.placeholder0000664000175100017510000000000015033036143023316 0ustar00mylesmyles././@PaxHeader0000000000000000000000000000003200000000000010210 xustar0026 mtime=1751923813.53571 masakari-19.1.0.dev18/releasenotes/source/_templates/0000775000175100017510000000000015033036146021557 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/source/_templates/.placeholder0000664000175100017510000000000015033036143024025 0ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/source/conf.py0000664000175100017510000002132215033036143020716 0ustar00mylesmyles# -*- coding: utf-8 -*- # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. # Masakari Release Notes documentation build configuration file, created by # sphinx-quickstart on Tue Jun 28 9:58 AM 2016. # # This file is execfile()d with the current directory set to its # containing dir. 
# # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # sys.path.insert(0, os.path.abspath('.')) # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. # needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ 'openstackdocstheme', 'reno.sphinxext', ] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. # source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = 'Masakari Release Notes' copyright = '2016, OpenStack Foundation' # Release notes are version independent. # The short X.Y version. version = '' # The full version, including alpha/beta/rc tags. release = '' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: # today = '' # Else, today_fmt is used as the format for a strftime call. # today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = [] # The reST default role (used for this markup: `text`) to use for all # documents. 
# default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. # add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). # add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. # show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'native' # A list of ignored prefixes for module index sorting. # modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. # keep_warnings = False # openstackdocstheme options openstackdocs_repo_name = 'openstack/masakari' openstackdocs_bug_project = 'masakari' openstackdocs_auto_name = False # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'openstackdocs' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. # html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". # html_title = None # A shorter title for the navigation bar. Default is the same as html_title. # html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. # html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. # html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. 
They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. # html_extra_path = [] # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. # html_use_smartypants = True # Custom sidebar templates, maps document names to template names. # html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. # html_additional_pages = {} # If false, no module index is generated. # html_domain_indices = True # If false, no index is generated. html_use_index = False # If true, the index is split into individual pages for each letter. # html_split_index = False # If true, links to the reST sources are added to the pages. # html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. # html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. # html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. # html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). # html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'MasakariReleaseNotesdoc' # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). # 'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). # 'pointsize': '10pt', # Additional stuff for the LaTeX preamble. # 'preamble': '', } # Grouping the document tree into LaTeX files. 
List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ ('index', 'MasakariReleaseNotes.tex', 'Masakari Release Notes Documentation', 'Masakari Developers', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. # latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. # latex_use_parts = False # If true, show page references after internal links. # latex_show_pagerefs = False # If true, show URL addresses after external links. # latex_show_urls = False # Documents to append as an appendix to all manuals. # latex_appendices = [] # If false, no module index is generated. # latex_domain_indices = True # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'masakarireleasenotes', 'Masakari Release Notes Documentation', ['Masakari Developers'], 1) ] # If true, show URL addresses after external links. # man_show_urls = False # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ('index', 'MasakariReleaseNotes', 'Masakari Release Notes Documentation', 'Masakari Developers', 'MasakariReleaseNotes', 'One line description of project.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. # texinfo_appendices = [] # If false, no module index is generated. # texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. # texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. 
# texinfo_no_detailmenu = False # -- Options for Internationalization output ------------------------------ locale_dirs = ['locale/'] ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/source/index.rst0000664000175100017510000000050315033036143021256 0ustar00mylesmylesWelcome to Masakari Release Notes documentation! ================================================ Contents ======== .. toctree:: :maxdepth: 1 unreleased 2025.1 2024.2 2024.1 2023.2 2023.1 zed yoga xena wallaby victoria ussuri train stein rocky queens pike ocata ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/source/ocata.rst0000664000175100017510000000021115033036143021232 0ustar00mylesmyles=========================== Ocata Series Release Notes =========================== .. release-notes:: :branch: origin/stable/ocata ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/source/pike.rst0000664000175100017510000000020515033036143021076 0ustar00mylesmyles========================== Pike Series Release Notes ========================== .. release-notes:: :branch: origin/stable/pike ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/source/queens.rst0000664000175100017510000000021515033036143021447 0ustar00mylesmyles============================ Queens Series Release Notes ============================ .. release-notes:: :branch: origin/stable/queens ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/source/rocky.rst0000664000175100017510000000022115033036143021273 0ustar00mylesmyles=================================== Rocky Series Release Notes =================================== .. 
release-notes:: :branch: stable/rocky ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/source/stein.rst0000664000175100017510000000022115033036143021266 0ustar00mylesmyles=================================== Stein Series Release Notes =================================== .. release-notes:: :branch: stable/stein ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/source/train.rst0000664000175100017510000000017615033036143021272 0ustar00mylesmyles========================== Train Series Release Notes ========================== .. release-notes:: :branch: stable/train ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/source/unreleased.rst0000664000175100017510000000016015033036143022275 0ustar00mylesmyles============================== Current Series Release Notes ============================== .. release-notes:: ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/source/ussuri.rst0000664000175100017510000000020215033036143021475 0ustar00mylesmyles=========================== Ussuri Series Release Notes =========================== .. release-notes:: :branch: stable/ussuri ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/source/victoria.rst0000664000175100017510000000022015033036143021763 0ustar00mylesmyles============================= Victoria Series Release Notes ============================= .. 
release-notes:: :branch: unmaintained/victoria ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/source/wallaby.rst0000664000175100017510000000021415033036143021601 0ustar00mylesmyles============================ Wallaby Series Release Notes ============================ .. release-notes:: :branch: unmaintained/wallaby ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/source/xena.rst0000664000175100017510000000020015033036143021074 0ustar00mylesmyles========================= Xena Series Release Notes ========================= .. release-notes:: :branch: unmaintained/xena ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/source/yoga.rst0000664000175100017510000000020015033036143021100 0ustar00mylesmyles========================= Yoga Series Release Notes ========================= .. release-notes:: :branch: unmaintained/yoga ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/source/zed.rst0000664000175100017510000000017415033036143020735 0ustar00mylesmyles======================== Zed Series Release Notes ======================== .. release-notes:: :branch: unmaintained/zed ././@PaxHeader0000000000000000000000000000003200000000000010210 xustar0026 mtime=1751923813.53571 masakari-19.1.0.dev18/releasenotes/templates/0000775000175100017510000000000015033036146020120 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/templates/feature.yml0000664000175100017510000000041715033036143022275 0ustar00mylesmyles--- features: - | Implements [some feature]. [Can be described using multiple sentences if necessary.] [Limitations worth mentioning can be included as well.] 
`Blueprint [blueprint id] `__ ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/releasenotes/templates/fix.yml0000664000175100017510000000034615033036143021431 0ustar00mylesmyles--- fixes: - | Fixes [some bug]. [Can be described using multiple sentences if necessary.] [Possibly also giving the previous behaviour description.] `LP#[bug number] `__ ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/requirements.txt0000664000175100017510000000150415033036143016712 0ustar00mylesmylesalembic>=1.8.0 # MIT iso8601>=0.1.11 # MIT jsonschema>=3.2.0 # MIT keystoneauth1>=3.4.0 # Apache-2.0 keystonemiddleware>=4.17.0 # Apache-2.0 WebOb>=1.7.1 # MIT microversion-parse>=0.2.1 # Apache-2.0 oslo.config>=5.2.0 # Apache-2.0 oslo.context>=2.19.2 # Apache-2.0 oslo.db>=4.44.0 # Apache-2.0 oslo.messaging>=14.1.0 # Apache-2.0 oslo.i18n>=3.15.3 # Apache-2.0 oslo.log>=3.36.0 # Apache-2.0 oslo.middleware>=3.31.0 # Apache-2.0 oslo.policy>=4.5.0 # Apache-2.0 oslo.service!=1.28.1,>=1.24.0 # Apache-2.0 oslo.upgradecheck>=1.3.0 # Apache-2.0 oslo.utils>=4.7.0 # Apache-2.0 oslo.versionedobjects>=1.31.2 # Apache-2.0 pbr!=2.1.0,>=2.0.0 # Apache-2.0 python-novaclient>=9.1.0 # Apache-2.0 stevedore>=1.20.0 # Apache-2.0 SQLAlchemy>=1.2.19 # MIT SQLAlchemy-Utils>=0.33.10 # Apache-2.0 taskflow>=2.16.0 # Apache-2.0 tooz>=2.10.1 # Apache-2.0 ././@PaxHeader0000000000000000000000000000003200000000000010210 xustar0026 mtime=1751923813.52071 masakari-19.1.0.dev18/roles/0000775000175100017510000000000015033036146014555 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000003200000000000010210 xustar0026 mtime=1751923813.52071 masakari-19.1.0.dev18/roles/devstack-config/0000775000175100017510000000000015033036146017624 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000003200000000000010210 xustar0026 mtime=1751923813.53571 
masakari-19.1.0.dev18/roles/devstack-config/tasks/0000775000175100017510000000000015033036146020751 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/roles/devstack-config/tasks/main.yml0000664000175100017510000000064715033036143022424 0ustar00mylesmyles--- - name: Collect devstack stackenv file fetch: flat: true dest: "{{ zuul.executor.log_root }}/{{ inventory_hostname }}/confs/devstack/-stackenv" src: "/opt/stack/devstack/.stackenv" - name: Collect devstack config files synchronize: dest: "{{ zuul.executor.log_root }}/{{ inventory_hostname }}/confs/devstack/" mode: pull src: "/opt/stack/devstack/{{ item }}" with_items: - local.conf ././@PaxHeader0000000000000000000000000000003200000000000010210 xustar0026 mtime=1751923813.53671 masakari-19.1.0.dev18/setup.cfg0000664000175100017510000000561115033036146015255 0ustar00mylesmyles[metadata] name = masakari summary = Virtual Machine High Availability (VMHA) service for OpenStack description_file = README.rst author = OpenStack author_email = openstack-discuss@lists.openstack.org url = https://docs.openstack.org/masakari/latest/ python_requires = >=3.10 classifier = Development Status :: 5 - Production/Stable Environment :: OpenStack Intended Audience :: Information Technology Intended Audience :: System Administrators License :: OSI Approved :: Apache Software License Operating System :: POSIX :: Linux Programming Language :: Python Programming Language :: Python :: 3 Programming Language :: Python :: 3.10 Programming Language :: Python :: 3.11 Programming Language :: Python :: 3.12 Programming Language :: Python :: 3 :: Only Programming Language :: Python :: Implementation :: CPython [files] data_files = etc/masakari = etc/masakari/api-paste.ini etc/masakari/masakari-custom-recovery-methods.conf packages = masakari [entry_points] oslo.config.opts = masakari.conf = masakari.conf.opts:list_opts customized_recovery_flow_opts = 
masakari.conf.opts:list_recovery_workflow_opts oslo.config.opts.defaults = masakari.api = masakari.common.config:set_lib_defaults oslo.policy.enforcer = masakari = masakari.policy:get_enforcer oslo.policy.policies = masakari = masakari.policies:list_rules console_scripts = masakari-api = masakari.cmd.api:main masakari-engine = masakari.cmd.engine:main masakari-manage = masakari.cmd.manage:main masakari-status = masakari.cmd.status:main masakari.api.v1.extensions = versions = masakari.api.openstack.ha.versionsV1:Versions extension_info = masakari.api.openstack.ha.extension_info:ExtensionInfo segments = masakari.api.openstack.ha.segments:Segments hosts = masakari.api.openstack.ha.hosts:Hosts notifications = masakari.api.openstack.ha.notifications:Notifications vmoves = masakari.api.openstack.ha.vmoves:VMoves masakari.driver = taskflow_driver = masakari.engine.drivers.taskflow:TaskFlowDriver masakari.task_flow.tasks = disable_compute_service_task = masakari.engine.drivers.taskflow.host_failure:DisableComputeServiceTask prepare_HA_enabled_instances_task = masakari.engine.drivers.taskflow.host_failure:PrepareHAEnabledInstancesTask evacuate_instances_task = masakari.engine.drivers.taskflow.host_failure:EvacuateInstancesTask stop_instance_task = masakari.engine.drivers.taskflow.instance_failure:StopInstanceTask start_instance_task = masakari.engine.drivers.taskflow.instance_failure:StartInstanceTask confirm_instance_active_task = masakari.engine.drivers.taskflow.instance_failure:ConfirmInstanceActiveTask disable_compute_node_task = masakari.engine.drivers.taskflow.process_failure:DisableComputeNodeTask confirm_compute_node_disabled_task = masakari.engine.drivers.taskflow.process_failure:ConfirmComputeNodeDisabledTask no_op = masakari.engine.drivers.taskflow.no_op:Noop [egg_info] tag_build = tag_date = 0 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 
masakari-19.1.0.dev18/setup.py0000664000175100017510000000127115033036143015141 0ustar00mylesmyles# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import setuptools setuptools.setup( setup_requires=['pbr>=2.0.0'], pbr=True) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/test-requirements.txt0000664000175100017510000000076515033036143017677 0ustar00mylesmyleshacking>=6.1.0,<6.2.0 # Apache-2.0 coverage!=4.4,>=4.0 # Apache-2.0 ddt>=1.0.1 # MIT doc8>=0.6.0 # Apache-2.0 pep8>=1.5.7 psycopg2>=2.8 # LGPL/ZPL PyMySQL>=0.7.6 # MIT License python-subunit>=1.0.0 # Apache-2.0/BSD openstacksdk>=0.35.0 # Apache-2.0 oslotest>=3.2.0 # Apache-2.0 stestr>=1.0.0 # Apache-2.0 SQLAlchemy-Utils>=0.33.10 # Apache-2.0 requests-mock>=1.2.0 # Apache-2.0 testresources>=2.0.0 # Apache-2.0/BSD testscenarios>=0.4 # Apache-2.0/BSD testtools>=2.2.0 # MIT yamllint>=1.22.0 # GPLv3 ././@PaxHeader0000000000000000000000000000003200000000000010210 xustar0026 mtime=1751923813.53571 masakari-19.1.0.dev18/tools/0000775000175100017510000000000015033036146014571 5ustar00mylesmyles././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/tools/test-setup.sh0000775000175100017510000000353415033036143017247 0ustar00mylesmyles#!/bin/bash -xe # This script will be run by OpenStack CI before unit tests are run, # it sets up the test system 
as needed. # Developers should setup their test systems in a similar way. # This setup needs to be run as a user that can run sudo. # The root password for the MySQL database; pass it in via # MYSQL_ROOT_PW. DB_ROOT_PW=${MYSQL_ROOT_PW:-insecure_slave} # This user and its password are used by the tests, if you change it, # your tests might fail. DB_USER=openstack_citest DB_PW=openstack_citest sudo -H mysqladmin -u root password $DB_ROOT_PW # It's best practice to remove anonymous users from the database. If # an anonymous user exists, then it matches first for connections and # other connections from that host will not work. sudo -H mysql -u root -p$DB_ROOT_PW -h localhost -e " DELETE FROM mysql.user WHERE User=''; FLUSH PRIVILEGES; CREATE USER '$DB_USER'@'%' IDENTIFIED BY '$DB_PW'; GRANT ALL PRIVILEGES ON *.* TO '$DB_USER'@'%' WITH GRANT OPTION;" # Now create our database. mysql -u $DB_USER -p$DB_PW -h 127.0.0.1 -e " SET default_storage_engine=MYISAM; DROP DATABASE IF EXISTS openstack_citest; CREATE DATABASE openstack_citest CHARACTER SET utf8;" # Same for PostgreSQL # Setup user root_roles=$(sudo -H -u postgres psql -t -c " SELECT 'HERE' from pg_roles where rolname='$DB_USER'") if [[ ${root_roles} == *HERE ]];then sudo -H -u postgres psql -c "ALTER ROLE $DB_USER WITH SUPERUSER LOGIN PASSWORD '$DB_PW'" else sudo -H -u postgres psql -c "CREATE ROLE $DB_USER WITH SUPERUSER LOGIN PASSWORD '$DB_PW'" fi # Store password for tests cat << EOF > $HOME/.pgpass *:*:*:$DB_USER:$DB_PW EOF chmod 0600 $HOME/.pgpass # Now create our database psql -h 127.0.0.1 -U $DB_USER -d template1 -c "DROP DATABASE IF EXISTS openstack_citest" createdb -h 127.0.0.1 -U $DB_USER -l C -T template0 -E utf8 openstack_citest ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1751923811.0 masakari-19.1.0.dev18/tox.ini0000664000175100017510000001244715033036143014751 0ustar00mylesmyles[tox] minversion = 4.6.0 envlist = pep8,py3 [testenv] usedevelop = True setenv = 
LANGUAGE=en_US LC_ALL=en_US.utf-8 deps = -c{env:TOX_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/master} -r{toxinidir}/requirements.txt -r{toxinidir}/test-requirements.txt commands = stestr run {posargs} passenv = HTTP_PROXY HTTPS_PROXY NO_PROXY OS_DEBUG GENERATE_HASHES [testenv:functional] commands = stestr --test-path=./masakari/tests/functional run --concurrency=1 --slowest {posargs} [testenv:genconfig] commands = oslo-config-generator --config-file=etc/masakari/masakari-config-generator.conf oslo-config-generator --config-file=etc/masakari/masakari-customized-recovery-flow-config-generator.conf [testenv:genpolicy] commands = oslopolicy-sample-generator --config-file=etc/masakari/masakari-policy-generator.conf [testenv:linters] skip_install = True deps = -c{env:TOX_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/master} -r{toxinidir}/test-requirements.txt {[testenv:bashate]deps} commands = {[testenv:pep8]commands} {[testenv:doc8]commands} {[testenv:yamllint]commands} {[testenv:bashate]commands} [testenv:pep8] skip_install = True deps = {[testenv:linters]deps} commands = flake8 {posargs} [testenv:doc8] skip_install = True deps = {[testenv:linters]deps} commands = doc8 README.rst CONTRIBUTING.rst HACKING.rst doc/source doc8 releasenotes/source doc8 -e '.yaml' releasenotes/notes doc8 -e '.rst' -e '.inc' api-ref/source [testenv:yamllint] skip_install = True deps = {[testenv:linters]deps} commands = yamllint -s . 
[testenv:bashate] skip_install = True deps = -c{env:TOX_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/master} bashate commands = bashate devstack/plugin.sh -v --ignore E006 --error E005,E042,E043 [testenv:venv] commands = {posargs} [testenv:cover] setenv = VIRTUAL_ENV={envdir} PYTHON=coverage run --source masakari --parallel-mode commands = stestr run {posargs} coverage combine coverage html -d cover coverage xml -o cover/coverage.xml [testenv:docs] # NOTE(elod.illes): requirements.txt is needed because otherwise # dependencies are installed during 'develop-inst' tox phase without # constraints which could cause failures in stable branches. deps = -c{env:TOX_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/master} -r{toxinidir}/requirements.txt -r{toxinidir}/doc/requirements.txt commands = sphinx-build -W -b html doc/source doc/build/html [testenv:pdf-docs] deps = {[testenv:docs]deps} allowlist_externals = make commands = sphinx-build -W -b latex doc/source doc/build/pdf make -C doc/build/pdf [testenv:releasenotes] skip_install = True deps = -c{env:TOX_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/master} -r{toxinidir}/doc/requirements.txt allowlist_externals = rm commands = rm -rf releasenotes/build sphinx-build -a -E -W -d releasenotes/build/doctrees -b html releasenotes/source releasenotes/build/html [testenv:debug] commands = oslo_debug_helper {posargs} [testenv:api-ref] # This environment is called from CI scripts to test and publish # the API Ref to docs.openstack.org. deps = {[testenv:docs]deps} allowlist_externals = rm commands = rm -rf api-ref/build sphinx-build -W -b html -d api-ref/build/doctrees api-ref/source api-ref/build/html [flake8] show-source = True # E123, E125 skipped as they are invalid PEP-8. # The below hacking rules by default are disabled should be enabled: # [H106] Don't put vim configuration in source files. # [H203] Use assertIs(Not)None to check for None. 
# [E231] missing whitespace after ',' # [H904] Delay string interpolations at logging calls. enable-extensions = H106,H203,H904 # [W504] line break after binary operator (use W503 instead) ignore = E123,E125,E128,E231,E731,H405,W504 builtins = _ exclude = .venv,.git,.tox,dist,doc,*lib/python*,*egg,build [hacking] import_exceptions = masakari.i18n [flake8:local-plugins] extension = M301 = checks:no_db_session_in_public_api M302 = checks:use_timeutils_utcnow M303 = checks:capital_cfg_help M305 = checks:assert_true_instance M306 = checks:assert_equal_type M308 = checks:no_translate_logs M309 = checks:no_import_translation_in_tests M310 = checks:no_setting_conf_directly_in_tests M315 = checks:no_mutable_default_args M316 = checks:check_explicit_underscore_import M317 = checks:use_jsonutils M318 = checks:assert_true_or_false_with_in M319 = checks:assert_raises_regexp M320 = checks:dict_constructor_with_list_copy M321 = checks:assert_equal_in M322 = checks:check_greenthread_spawns M323 = checks:check_no_contextlib_nested M324 = checks:check_config_option_in_central_place M325 = checks:check_doubled_words M326 = checks:check_python3_no_iteritems M327 = checks:check_python3_no_iterkeys M328 = checks:check_python3_no_itervalues M329 = checks:no_os_popen M331 = checks:no_log_warn M332 = checks:yield_followed_by_space M333 = checks:check_policy_registration_in_central_place M334 = checks:check_policy_enforce paths = ./masakari/hacking [testenv:bindep] skip_install = True deps = bindep commands = bindep test [doc8] # NOTE(yoctozepto): this is due to multiple violations - it is better to keep # it limited sanely rather than disable the D001 "Line too long" rule altogether max-line-length = 105