From bb1ac51679f9f7b0c34c9f412dfe881eca53b3ac Mon Sep 17 00:00:00 2001 From: anish-mudaraddi Date: Fri, 10 Jan 2025 13:27:38 +0000 Subject: [PATCH] Delete rabbit consumer chart and source code these have been moved over to cloud-helm-charts and cloud-docker-images respectively --- .github/workflows/rabbit_consumer.yaml | 127 ---- .github/workflows/rabbit_consumer_chart.yaml | 45 -- OpenStack-Rabbit-Consumer/.pylintrc | 617 ------------------ OpenStack-Rabbit-Consumer/Dockerfile | 33 - OpenStack-Rabbit-Consumer/entrypoint.py | 27 - .../rabbit_consumer/__init__.py | 0 .../rabbit_consumer/aq_api.py | 306 --------- .../rabbit_consumer/aq_metadata.py | 84 --- .../rabbit_consumer/consumer_config.py | 66 -- .../rabbit_consumer/message_consumer.py | 311 --------- .../rabbit_consumer/openstack_address.py | 70 -- .../rabbit_consumer/openstack_api.py | 112 ---- .../rabbit_consumer/rabbit_message.py | 60 -- .../rabbit_consumer/vm_data.py | 28 - OpenStack-Rabbit-Consumer/readme | 46 -- .../requirements-test.txt | 3 - OpenStack-Rabbit-Consumer/requirements.txt | 8 - OpenStack-Rabbit-Consumer/tests/__init__.py | 0 OpenStack-Rabbit-Consumer/tests/conftest.py | 87 --- .../tests/test_aq_api.py | 474 -------------- .../tests/test_aq_metadata.py | 109 ---- .../tests/test_consumer_config.py | 40 -- .../tests/test_message_consumer.py | 495 -------------- .../tests/test_openstack_address.py | 161 ----- .../tests/test_openstack_api.py | 161 ----- .../tests/test_rabbit_message.py | 76 --- OpenStack-Rabbit-Consumer/version.txt | 1 - charts/rabbit-consumer/.helmignore | 23 - charts/rabbit-consumer/Chart.yaml | 15 - charts/rabbit-consumer/dev-values.yaml | 21 - .../include/aquilon-gridpp-rl-ac-uk-chain.pem | 70 -- charts/rabbit-consumer/include/kinit.sh | 21 - charts/rabbit-consumer/include/krb5.conf | 41 -- .../include/sidecar-entrypoint.sh | 17 - charts/rabbit-consumer/prod-values.yaml | 14 - charts/rabbit-consumer/readme.md | 125 ---- charts/rabbit-consumer/staging-values.yaml | 20 - .../rabbit-consumer/templates/configmap.yaml | 62 -- .../rabbit-consumer/templates/deployment.yaml | 114 ---- charts/rabbit-consumer/values.yaml | 34 - 40 files changed, 4124 deletions(-) delete mode 100644 .github/workflows/rabbit_consumer.yaml delete mode 100644 .github/workflows/rabbit_consumer_chart.yaml delete mode 100644 OpenStack-Rabbit-Consumer/.pylintrc delete mode 100644 OpenStack-Rabbit-Consumer/Dockerfile delete mode 100644 OpenStack-Rabbit-Consumer/entrypoint.py delete mode 100644 OpenStack-Rabbit-Consumer/rabbit_consumer/__init__.py delete mode 100644 OpenStack-Rabbit-Consumer/rabbit_consumer/aq_api.py delete mode 100644 OpenStack-Rabbit-Consumer/rabbit_consumer/aq_metadata.py delete mode 100644 OpenStack-Rabbit-Consumer/rabbit_consumer/consumer_config.py delete mode 100644 OpenStack-Rabbit-Consumer/rabbit_consumer/message_consumer.py delete mode 100644 OpenStack-Rabbit-Consumer/rabbit_consumer/openstack_address.py delete mode 100644 OpenStack-Rabbit-Consumer/rabbit_consumer/openstack_api.py delete mode 100644 OpenStack-Rabbit-Consumer/rabbit_consumer/rabbit_message.py delete mode 100644 OpenStack-Rabbit-Consumer/rabbit_consumer/vm_data.py delete mode 100644 OpenStack-Rabbit-Consumer/readme delete mode 100644 OpenStack-Rabbit-Consumer/requirements-test.txt delete mode 100644 OpenStack-Rabbit-Consumer/requirements.txt delete mode 100644 OpenStack-Rabbit-Consumer/tests/__init__.py delete mode 100644 OpenStack-Rabbit-Consumer/tests/conftest.py delete mode 100644 OpenStack-Rabbit-Consumer/tests/test_aq_api.py delete mode 
100644 OpenStack-Rabbit-Consumer/tests/test_aq_metadata.py delete mode 100644 OpenStack-Rabbit-Consumer/tests/test_consumer_config.py delete mode 100644 OpenStack-Rabbit-Consumer/tests/test_message_consumer.py delete mode 100644 OpenStack-Rabbit-Consumer/tests/test_openstack_address.py delete mode 100644 OpenStack-Rabbit-Consumer/tests/test_openstack_api.py delete mode 100644 OpenStack-Rabbit-Consumer/tests/test_rabbit_message.py delete mode 100644 OpenStack-Rabbit-Consumer/version.txt delete mode 100644 charts/rabbit-consumer/.helmignore delete mode 100644 charts/rabbit-consumer/Chart.yaml delete mode 100644 charts/rabbit-consumer/dev-values.yaml delete mode 100644 charts/rabbit-consumer/include/aquilon-gridpp-rl-ac-uk-chain.pem delete mode 100644 charts/rabbit-consumer/include/kinit.sh delete mode 100644 charts/rabbit-consumer/include/krb5.conf delete mode 100644 charts/rabbit-consumer/include/sidecar-entrypoint.sh delete mode 100644 charts/rabbit-consumer/prod-values.yaml delete mode 100644 charts/rabbit-consumer/readme.md delete mode 100644 charts/rabbit-consumer/staging-values.yaml delete mode 100644 charts/rabbit-consumer/templates/configmap.yaml delete mode 100644 charts/rabbit-consumer/templates/deployment.yaml delete mode 100644 charts/rabbit-consumer/values.yaml diff --git a/.github/workflows/rabbit_consumer.yaml b/.github/workflows/rabbit_consumer.yaml deleted file mode 100644 index 16946db1..00000000 --- a/.github/workflows/rabbit_consumer.yaml +++ /dev/null @@ -1,127 +0,0 @@ -name: Rabbit Consumer - -on: - push: - branches: - - master - pull_request: - paths: - - ".github/workflows/rabbit_consumer.yaml" - - "OpenStack-Rabbit-Consumer/**" - -jobs: - test_and_lint: - runs-on: ubuntu-latest - strategy: - matrix: - python-version: ["3.10"] - steps: - - uses: actions/checkout@v4 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python-version }} - cache: "pip" - - name: Install dependencies - run: | - sudo apt-get update --fix-missing - python -m pip install --upgrade pip - # Required for requests-kerberos - sudo DEBIAN_FRONTEND=noninteractive apt-get -yq install libkrb5-dev - pip install -r OpenStack-Rabbit-Consumer/requirements.txt - pip install -r OpenStack-Rabbit-Consumer/requirements-test.txt - - - name: Run tests - # Using Python3 to launch the module sets up the Python path for us - run: cd OpenStack-Rabbit-Consumer && python3 -m coverage run -m pytest . 
- - - name: Analyse with pylint - run: | - cd OpenStack-Rabbit-Consumer && pylint $(git ls-files '*.py') - - - name: Prepare coverage - run: | - cd OpenStack-Rabbit-Consumer && python -m coverage xml - - - name: Upload coverage to codecov - uses: codecov/codecov-action@v5 - with: - files: OpenStack-Rabbit-Consumer/coverage.xml - fail_ci_if_error: true - flags: rabbit_consumer - token: ${{ secrets.CODECOV_TOKEN }} - - push_dev_image_harbor: - runs-on: ubuntu-latest - needs: test_and_lint - steps: - - uses: actions/checkout@v4 - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 - - - name: Login to Harbor - uses: docker/login-action@v3 - with: - registry: harbor.stfc.ac.uk - username: ${{ secrets.STAGING_HARBOR_USERNAME }} - password: ${{ secrets.STAGING_HARBOR_TOKEN }} - - - name: Set commit SHA for later - id: commit_sha - run: echo "sha_short=$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT - - - name: Build and push to staging project - uses: docker/build-push-action@v6 - with: - cache-from: type=gha - cache-to: type=gha,mode=max - push: true - context: "{{defaultContext}}:OpenStack-Rabbit-Consumer" - tags: "harbor.stfc.ac.uk/stfc-cloud-staging/openstack-rabbit-consumer:${{ steps.commit_sha.outputs.sha_short }}" - - - name: Inform of tagged name - run: echo "Image published to harbor.stfc.ac.uk/stfc-cloud-staging/openstack-rabbit-consumer:${{ steps.commit_sha.outputs.sha_short }}" - - push_release_image_harbor: - runs-on: ubuntu-latest - needs: test_and_lint - if: github.ref == 'refs/heads/master' - steps: - - uses: actions/checkout@v4 - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 - - - name: Login to Harbor - uses: docker/login-action@v3 - with: - registry: harbor.stfc.ac.uk - username: ${{ secrets.HARBOR_USERNAME }} - password: ${{ secrets.HARBOR_TOKEN }} - - - name: Get release tag for later - id: release_tag - run: echo "version=$(cat OpenStack-Rabbit-Consumer/version.txt)" >> $GITHUB_OUTPUT - - - name: Check if release file has updated - uses: dorny/paths-filter@v3 - id: release_updated - with: - filters: | - version: - - 'OpenStack-Rabbit-Consumer/version.txt' - - - name: Build and push on version change - uses: docker/build-push-action@v6 - if: steps.release_updated.outputs.version == 'true' - with: - cache-from: type=gha - cache-to: type=gha,mode=max - push: true - context: "{{defaultContext}}:OpenStack-Rabbit-Consumer" - tags: "harbor.stfc.ac.uk/stfc-cloud/openstack-rabbit-consumer:v${{ steps.release_tag.outputs.version }}" - - - name: Inform of tagged name - if: steps.release_updated.outputs.version == 'true' - run: echo "Image published to harbor.stfc.ac.uk/stfc-cloud/openstack-rabbit-consumer:v${{ steps.release_tag.outputs.version }}" diff --git a/.github/workflows/rabbit_consumer_chart.yaml b/.github/workflows/rabbit_consumer_chart.yaml deleted file mode 100644 index 2c55fa0b..00000000 --- a/.github/workflows/rabbit_consumer_chart.yaml +++ /dev/null @@ -1,45 +0,0 @@ -name: Build local helm chart -on: - push: - branches: - - master - pull_request: - paths: - - ".github/workflows/rabbit_consumer_chart.yaml" - - "charts/rabbit-consumer/**" - -jobs: - helm-lint: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - name: Set up Helm - uses: azure/setup-helm@v4 - - name: Lint Helm Chart - # Note --strict=true != --strict with the former being stricter - run: | - helm lint charts/rabbit-consumer --values charts/rabbit-consumer/values.yaml --values charts/rabbit-consumer/dev-values.yaml --strict=true - - publish: - runs-on: 
ubuntu-latest - if: github.ref == 'refs/heads/master' - needs: - - helm-lint - steps: - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - - name: Configure Git - run: | - git config user.name "$GITHUB_ACTOR" - git config user.email "$GITHUB_ACTOR@users.noreply.github.com" - - - name: Install Helm - uses: azure/setup-helm@v4 - - - name: Run chart-releaser - uses: helm/chart-releaser-action@v1.6.0 - env: - CR_TOKEN: "${{ secrets.GITHUB_TOKEN }}" diff --git a/OpenStack-Rabbit-Consumer/.pylintrc b/OpenStack-Rabbit-Consumer/.pylintrc deleted file mode 100644 index c76053a5..00000000 --- a/OpenStack-Rabbit-Consumer/.pylintrc +++ /dev/null @@ -1,617 +0,0 @@ -[MAIN] - -# Analyse import fallback blocks. This can be used to support both Python 2 and -# 3 compatible code, which means that the block might have code that exists -# only in one or another interpreter, leading to false positives when analysed. -analyse-fallback-blocks=no - -# Clear in-memory caches upon conclusion of linting. Useful if running pylint -# in a server-like mode. -clear-cache-post-run=no - -# Load and enable all available extensions. Use --list-extensions to see a list -# all available extensions. -#enable-all-extensions= - -# In error mode, messages with a category besides ERROR or FATAL are -# suppressed, and no reports are done by default. Error mode is compatible with -# disabling specific errors. -#errors-only= - -# Always return a 0 (non-error) status code, even if lint errors are found. -# This is primarily useful in continuous integration scripts. -#exit-zero= - -# A comma-separated list of package or module names from where C extensions may -# be loaded. Extensions are loading into the active Python interpreter and may -# run arbitrary code. -extension-pkg-allow-list= - -# A comma-separated list of package or module names from where C extensions may -# be loaded. Extensions are loading into the active Python interpreter and may -# run arbitrary code. (This is an alternative name to extension-pkg-allow-list -# for backward compatibility.) -extension-pkg-whitelist= - -# Return non-zero exit code if any of these messages/categories are detected, -# even if score is above --fail-under value. Syntax same as enable. Messages -# specified are enabled, while categories only check already-enabled messages. -fail-on= - -# Specify a score threshold under which the program will exit with error. -fail-under=10 - -# Interpret the stdin as a python script, whose filename needs to be passed as -# the module_or_package argument. -#from-stdin= - -# Files or directories to be skipped. They should be base names, not paths. -ignore= - -# Add files or directories matching the regular expressions patterns to the -# ignore-list. The regex matches against paths and can be in Posix or Windows -# format. Because '\' represents the directory delimiter on Windows systems, it -# can't be used as an escape character. -ignore-paths= - -# Files or directories matching the regular expression patterns are skipped. -# The regex matches against base names, not paths. The default value ignores -# Emacs file locks -ignore-patterns=^\.# - -# List of module names for which member attributes should not be checked -# (useful for modules/projects where namespaces are manipulated during runtime -# and thus existing member attributes cannot be deduced by static analysis). It -# supports qualified module names, as well as Unix pattern matching. 
-ignored-modules= - -# Python code to execute, usually for sys.path manipulation such as -# pygtk.require(). -#init-hook= - -# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the -# number of processors available to use, and will cap the count on Windows to -# avoid hangs. -jobs=0 - -# Control the amount of potential inferred values when inferring a single -# object. This can help the performance when dealing with large functions or -# complex, nested conditions. -limit-inference-results=100 - -# List of plugins (as comma separated values of python module names) to load, -# usually to register additional checkers. -load-plugins= - -# Pickle collected data for later comparisons. -persistent=yes - -# Minimum Python version to use for version dependent checks. Will default to -# the version used to run pylint. -py-version=3.8 - -# Discover python modules and packages in the file system subtree. -recursive=no - -# When enabled, pylint would attempt to guess common misconfiguration and emit -# user-friendly hints instead of false-positive error messages. -suggestion-mode=yes - -# Allow loading of arbitrary C extensions. Extensions are imported into the -# active Python interpreter and may run arbitrary code. -unsafe-load-any-extension=no - -# In verbose mode, extra non-checker-related info will be displayed. -#verbose= - - -[BASIC] - -# Naming style matching correct argument names. -argument-naming-style=snake_case - -# Regular expression matching correct argument names. Overrides argument- -# naming-style. If left empty, argument names will be checked with the set -# naming style. -#argument-rgx= - -# Naming style matching correct attribute names. -attr-naming-style=snake_case - -# Regular expression matching correct attribute names. Overrides attr-naming- -# style. If left empty, attribute names will be checked with the set naming -# style. -#attr-rgx= - -# Bad variable names which should always be refused, separated by a comma. -bad-names=foo, - bar, - baz, - toto, - tutu, - tata - -# Bad variable names regexes, separated by a comma. If names match any regex, -# they will always be refused -bad-names-rgxs= - -# Naming style matching correct class attribute names. -class-attribute-naming-style=any - -# Regular expression matching correct class attribute names. Overrides class- -# attribute-naming-style. If left empty, class attribute names will be checked -# with the set naming style. -#class-attribute-rgx= - -# Naming style matching correct class constant names. -class-const-naming-style=UPPER_CASE - -# Regular expression matching correct class constant names. Overrides class- -# const-naming-style. If left empty, class constant names will be checked with -# the set naming style. -#class-const-rgx= - -# Naming style matching correct class names. -class-naming-style=PascalCase - -# Regular expression matching correct class names. Overrides class-naming- -# style. If left empty, class names will be checked with the set naming style. -#class-rgx= - -# Naming style matching correct constant names. -const-naming-style=UPPER_CASE - -# Regular expression matching correct constant names. Overrides const-naming- -# style. If left empty, constant names will be checked with the set naming -# style. -#const-rgx= - -# Minimum line length for functions/classes that require docstrings, shorter -# ones are exempt. -docstring-min-length=-1 - -# Naming style matching correct function names. -function-naming-style=snake_case - -# Regular expression matching correct function names. 
Overrides function- -# naming-style. If left empty, function names will be checked with the set -# naming style. -#function-rgx= - -# Good variable names which should always be accepted, separated by a comma. -good-names=e, - i, - j, - k, - ex, - Run, - os, - _ - -# Good variable names regexes, separated by a comma. If names match any regex, -# they will always be accepted -good-names-rgxs= - -# Include a hint for the correct naming format with invalid-name. -include-naming-hint=no - -# Naming style matching correct inline iteration names. -inlinevar-naming-style=any - -# Regular expression matching correct inline iteration names. Overrides -# inlinevar-naming-style. If left empty, inline iteration names will be checked -# with the set naming style. -#inlinevar-rgx= - -# Naming style matching correct method names. -method-naming-style=snake_case - -# Regular expression matching correct method names. Overrides method-naming- -# style. If left empty, method names will be checked with the set naming style. -#method-rgx= - -# Naming style matching correct module names. -module-naming-style=snake_case - -# Regular expression matching correct module names. Overrides module-naming- -# style. If left empty, module names will be checked with the set naming style. -#module-rgx= - -# Colon-delimited sets of names that determine each other's naming style when -# the name regexes allow several styles. -name-group= - -# Regular expression which should only match function or class names that do -# not require a docstring. -no-docstring-rgx=^_ - -# List of decorators that produce properties, such as abc.abstractproperty. Add -# to this list to register other decorators that produce valid properties. -# These decorators are taken in consideration only for invalid-name. -property-classes=abc.abstractproperty - -# Regular expression matching correct type variable names. If left empty, type -# variable names will be checked with the set naming style. -#typevar-rgx= - -# Naming style matching correct variable names. -variable-naming-style=snake_case - -# Regular expression matching correct variable names. Overrides variable- -# naming-style. If left empty, variable names will be checked with the set -# naming style. -#variable-rgx= - - -[CLASSES] - -# Warn about protected attribute access inside special methods -check-protected-access-in-special-methods=no - -# List of method names used to declare (i.e. assign) instance attributes. -defining-attr-methods=__init__, - __new__, - setUp, - asyncSetUp, - __post_init__ - -# List of member names, which should be excluded from the protected access -# warning. -exclude-protected=_asdict,_fields,_replace,_source,_make,os._exit - -# List of valid names for the first argument in a class method. -valid-metaclass-classmethod-first-arg=mcs - -[DESIGN] - -# List of regular expressions of class ancestor names to ignore when counting -# public methods (see R0903) -exclude-too-few-public-methods= - -# List of qualified class names to ignore when counting class parents (see -# R0901) -ignored-parents= - -# Maximum number of arguments for function / method. -max-args=5 - -# Maximum number of attributes for a class (see R0902). -max-attributes=7 - -# Maximum number of boolean expressions in an if statement (see R0916). -max-bool-expr=5 - -# Maximum number of branch for function / method body. -max-branches=12 - -# Maximum number of locals for function / method body. -max-locals=15 - -# Maximum number of parents for a class (see R0901). 
-max-parents=7 - -# Maximum number of public methods for a class (see R0904). -max-public-methods=20 - -# Maximum number of return / yield for function / method body. -max-returns=6 - -# Maximum number of statements in function / method body. -max-statements=50 - -# Minimum number of public methods for a class (see R0903). -min-public-methods=2 - - -[EXCEPTIONS] - -# Exceptions that will emit a warning when caught. -overgeneral-exceptions=builtins.BaseException,builtins.Exception - -[FORMAT] - -# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. -expected-line-ending-format= - -# Regexp for a line that is allowed to be longer than the limit. -ignore-long-lines=^\s*(# )??$ - -# Number of spaces of indent required inside a hanging or continued line. -indent-after-paren=4 - -# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 -# tab). -indent-string=' ' - -# Maximum number of characters on a single line. -max-line-length=100 - -# Maximum number of lines in a module. -max-module-lines=1000 - -# Allow the body of a class to be on the same line as the declaration if body -# contains single statement. -single-line-class-stmt=no - -# Allow the body of an if to be on the same line as the test if there is no -# else. -single-line-if-stmt=no - - -[IMPORTS] - -# List of modules that can be imported at any level, not just the top level -# one. -allow-any-import-level= - -# Allow wildcard imports from modules that define __all__. -allow-wildcard-with-all=no - -# Deprecated modules which should not be used, separated by a comma. -deprecated-modules= - -# Output a graph (.gv or any supported image format) of external dependencies -# to the given file (report RP0402 must not be disabled). -ext-import-graph= - -# Output a graph (.gv or any supported image format) of all (i.e. internal and -# external) dependencies to the given file (report RP0402 must not be -# disabled). -import-graph= - -# Output a graph (.gv or any supported image format) of internal dependencies -# to the given file (report RP0402 must not be disabled). -int-import-graph= - -# Force import order to recognize a module as part of the standard -# compatibility libraries. -known-standard-library= - -# Force import order to recognize a module as part of a third party library. -known-third-party=enchant - -# Couples of modules and preferred modules, separated by a comma. -preferred-modules= - - -[LOGGING] - -# The type of string formatting that logging methods do. `old` means using % -# formatting, `new` is for `{}` formatting. -logging-format-style=old - -# Logging modules to check that the string format arguments are in logging -# function parameter format. -logging-modules=logging - - -[MESSAGES CONTROL] - -# Only show warnings with the listed confidence levels. Leave empty to show -# all. Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE, -# UNDEFINED. -confidence=HIGH, - CONTROL_FLOW, - INFERENCE, - INFERENCE_FAILURE, - UNDEFINED - -# Disable the message, report, category or checker with the given id(s). You -# can either give multiple identifiers separated by comma (,) or put this -# option multiple times (only on the command line, not in the configuration -# file where it should appear only once). You can also use "--disable=all" to -# disable everything first and then re-enable specific checks. For example, if -# you want to run only the similarities checker, you can use "--disable=all -# --enable=similarities". 
If you want to run only the classes checker, but have -# no Warning level messages displayed, use "--disable=all --enable=classes -# --disable=W". -disable=raw-checker-failed, - bad-inline-option, - locally-disabled, - file-ignored, - suppressed-message, - useless-suppression, - deprecated-pragma, - use-symbolic-message-instead, - line-too-long, - fixme - -# Enable the message, report, category or checker with the given id(s). You can -# either give multiple identifier separated by comma (,) or put this option -# multiple time (only on the command line, not in the configuration file where -# it should appear only once). See also the "--disable" option for examples. -enable=c-extension-no-member - - -[METHOD_ARGS] - -# List of qualified names (i.e., library.method) which require a timeout -# parameter e.g. 'requests.api.get,requests.api.post' -timeout-methods=requests.api.delete,requests.api.get,requests.api.head,requests.api.options,requests.api.patch,requests.api.post,requests.api.put,requests.api.request - - -[MISCELLANEOUS] - -# List of note tags to take in consideration, separated by a comma. -notes=FIXME, - XXX, - TODO - -# Regular expression of note tags to take in consideration. -notes-rgx= - - -[REFACTORING] - -# Maximum number of nested blocks for function / method body -max-nested-blocks=5 - -# Complete name of functions that never returns. When checking for -# inconsistent-return-statements if a never returning function is called then -# it will be considered as an explicit return statement and no message will be -# printed. -never-returning-functions=sys.exit,argparse.parse_error - - -[REPORTS] - -# Python expression which should return a score less than or equal to 10. You -# have access to the variables 'fatal', 'error', 'warning', 'refactor', -# 'convention', and 'info' which contain the number of messages in each -# category, as well as 'statement' which is the total number of statements -# analyzed. This score is used by the global evaluation report (RP0004). -evaluation=max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)) - -# Template used to display messages. This is a python new-style format string -# used to format the message information. See doc for all details. -msg-template= - -# Set the output format. Available formats are text, parseable, colorized, json -# and msvs (visual studio). You can also give a reporter class, e.g. -# mypackage.mymodule.MyReporterClass. -#output-format= - -# Tells whether to display a full report or only the messages. -reports=no - -# Activate the evaluation score. -score=yes - - -[SIMILARITIES] - -# Comments are removed from the similarity computation -ignore-comments=yes - -# Docstrings are removed from the similarity computation -ignore-docstrings=yes - -# Imports are removed from the similarity computation -ignore-imports=yes - -# Signatures are removed from the similarity computation -ignore-signatures=yes - -# Minimum lines number of a similarity. -min-similarity-lines=4 - - -[SPELLING] - -# Limits count of emitted suggestions for spelling mistakes. -max-spelling-suggestions=4 - -# Spelling dictionary name. No available dictionaries : You need to install -# both the python package and the system dependency for enchant to work.. -spelling-dict= - -# List of comma separated words that should be considered directives if they -# appear at the beginning of a comment and should not be checked. 
-spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy: - -# List of comma separated words that should not be checked. -spelling-ignore-words= - -# A path to a file that contains the private dictionary; one word per line. -spelling-private-dict-file= - -# Tells whether to store unknown words to the private dictionary (see the -# --spelling-private-dict-file option) instead of raising a message. -spelling-store-unknown-words=no - - -[STRING] - -# This flag controls whether inconsistent-quotes generates a warning when the -# character used as a quote delimiter is used inconsistently within a module. -check-quote-consistency=no - -# This flag controls whether the implicit-str-concat should generate a warning -# on implicit string concatenation in sequences defined over several lines. -check-str-concat-over-line-jumps=no - - -[TYPECHECK] - -# List of decorators that produce context managers, such as -# contextlib.contextmanager. Add to this list to register other decorators that -# produce valid context managers. -contextmanager-decorators=contextlib.contextmanager - -# List of members which are set dynamically and missed by pylint inference -# system, and so shouldn't trigger E1101 when accessed. Python regular -# expressions are accepted. -generated-members= - -# Tells whether to warn about missing members when the owner of the attribute -# is inferred to be None. -ignore-none=yes - -# This flag controls whether pylint should warn about no-member and similar -# checks whenever an opaque object is returned when inferring. The inference -# can return multiple potential results while evaluating a Python object, but -# some branches might not be evaluated, which results in partial inference. In -# that case, it might be useful to still emit no-member and other checks for -# the rest of the inferred objects. -ignore-on-opaque-inference=yes - -# List of symbolic message names to ignore for Mixin members. -ignored-checks-for-mixins=no-member, - not-async-context-manager, - not-context-manager, - attribute-defined-outside-init - -# List of class names for which member attributes should not be checked (useful -# for classes with dynamically set attributes). This supports the use of -# qualified names. -ignored-classes=optparse.Values,thread._local,_thread._local,argparse.Namespace - -# Show a hint with possible names when a member name was not found. The aspect -# of finding the hint is based on edit distance. -missing-member-hint=yes - -# The minimum edit distance a name should have in order to be considered a -# similar match for a missing member name. -missing-member-hint-distance=1 - -# The total number of similar names that should be taken in consideration when -# showing a hint for a missing member. -missing-member-max-choices=1 - -# Regex pattern to define which classes are considered mixins. -mixin-class-rgx=.*[Mm]ixin - -# List of decorators that change the signature of a decorated function. -signature-mutators= - - -[VARIABLES] - -# List of additional names supposed to be defined in builtins. Remember that -# you should avoid defining new builtins when possible. -additional-builtins= - -# Tells whether unused global variables should be treated as a violation. -allow-global-unused-variables=yes - -# List of names allowed to shadow builtins -allowed-redefined-builtins= - -# List of strings which can identify a callback function by name. A callback -# name must start or end with one of those strings. 
-callbacks=cb_, - _cb - -# A regular expression matching the name of dummy variables (i.e. expected to -# not be used). -dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ - -# Argument names that match this expression will be ignored. -ignored-argument-names=_.*|^ignored_|^unused_ - -# Tells whether we should check for unused import in __init__ files. -init-import=no - -# List of qualified module names which can have objects that can redefine -# builtins. -redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io diff --git a/OpenStack-Rabbit-Consumer/Dockerfile b/OpenStack-Rabbit-Consumer/Dockerfile deleted file mode 100644 index 37986f52..00000000 --- a/OpenStack-Rabbit-Consumer/Dockerfile +++ /dev/null @@ -1,33 +0,0 @@ -FROM python:3.10 - -WORKDIR /usr/src/app - -RUN apt-get update \ - && DEBIAN_FRONTEND=noninteractive \ - apt-get install -y --no-install-recommends \ - krb5-user \ - && rm -rf /var/lib/apt/lists/* - -COPY requirements.txt ./ -RUN pip install --no-cache-dir -r requirements.txt - -COPY . . - -ENV AQ_PREFIX=NOT_SET \ - AQ_URL=NOT_SET\ - # - KRB5CCNAME=NOT_SET \ - # - RABBIT_HOST=NOT_SET \ - RABBIT_PORT=NOT_SET \ - RABBIT_USERNAME=NOT_SET \ - RABBIT_PASSWORD=NOT_SET\ - # - OPENSTACK_AUTH_URL=NOT_SET \ - OPENSTACK_COMPUTE_URL=NOT_SET \ - OPENSTACK_USERNAME=NOT_SET \ - OPENSTACK_PASSWORD=NOT_SET - -ENV LOG_LEVEL=INFO - -CMD [ "python", "./entrypoint.py"] diff --git a/OpenStack-Rabbit-Consumer/entrypoint.py b/OpenStack-Rabbit-Consumer/entrypoint.py deleted file mode 100644 index e15fc2d2..00000000 --- a/OpenStack-Rabbit-Consumer/entrypoint.py +++ /dev/null @@ -1,27 +0,0 @@ -#!/usr/bin/python3 -# SPDX-License-Identifier: Apache-2.0 -# Copyright (c) 2023 United Kingdom Research and Innovation -""" -Prepares the logging and initiates the consumers. 
-""" -import logging -import logging.handlers -import os -import sys - - -def _prep_logging(): - logger = logging.getLogger("rabbit_consumer") - logger.setLevel(os.getenv("LOG_LEVEL", "INFO").upper()) - logger.addHandler(logging.StreamHandler(sys.stdout)) - - logging.getLogger("requests").setLevel(logging.WARNING) - logging.getLogger("urllib3").setLevel(logging.WARNING) - - -if __name__ == "__main__": - _prep_logging() - - from rabbit_consumer.message_consumer import initiate_consumer - - initiate_consumer() diff --git a/OpenStack-Rabbit-Consumer/rabbit_consumer/__init__.py b/OpenStack-Rabbit-Consumer/rabbit_consumer/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/OpenStack-Rabbit-Consumer/rabbit_consumer/aq_api.py b/OpenStack-Rabbit-Consumer/rabbit_consumer/aq_api.py deleted file mode 100644 index dcbadaeb..00000000 --- a/OpenStack-Rabbit-Consumer/rabbit_consumer/aq_api.py +++ /dev/null @@ -1,306 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright (c) 2023 United Kingdom Research and Innovation -""" -This file defines methods to be used to interact with the -Aquilon API -""" -import logging -import subprocess -from typing import Optional, List - -import requests -from requests.adapters import HTTPAdapter -from requests_kerberos import HTTPKerberosAuth -from urllib3.util.retry import Retry - -from rabbit_consumer.consumer_config import ConsumerConfig -from rabbit_consumer.aq_metadata import AqMetadata -from rabbit_consumer.openstack_address import OpenstackAddress -from rabbit_consumer.rabbit_message import RabbitMessage -from rabbit_consumer.vm_data import VmData - -HOST_CHECK_SUFFIX = "/host/{0}" - -UPDATE_INTERFACE_SUFFIX = "/machine/{0}/interface/{1}?boot&default_route" - -DELETE_HOST_SUFFIX = "/host/{0}" -DELETE_MACHINE_SUFFIX = "/machine/{0}" - -logger = logging.getLogger(__name__) - - -class AquilonError(Exception): - """ - Base class for Aquilon errors - """ - - -def verify_kerberos_ticket() -> bool: - """ - Check for a valid Kerberos ticket from a sidecar, or on the host - Raises a RuntimeError if no ticket is found - """ - logger.debug("Checking for valid Kerberos Ticket") - - if subprocess.call(["klist", "-s"]) == 1: - raise RuntimeError("No shared Kerberos ticket found.") - - logger.debug("Kerberos ticket success") - return True - - -def setup_requests( - url: str, method: str, desc: str, params: Optional[dict] = None -) -> str: - """ - Passes a request to the Aquilon API - """ - verify_kerberos_ticket() - logger.debug("%s: %s - params: %s", method, url, params) - - session = requests.Session() - session.verify = "/etc/grid-security/certificates/aquilon-gridpp-rl-ac-uk-chain.pem" - retries = Retry(total=5, backoff_factor=0.1, status_forcelist=[503]) - session.mount("https://", HTTPAdapter(max_retries=retries)) - if method == "post": - response = session.post(url, auth=HTTPKerberosAuth(), params=params) - elif method == "put": - response = session.put(url, auth=HTTPKerberosAuth(), params=params) - elif method == "delete": - response = session.delete(url, auth=HTTPKerberosAuth(), params=params) - else: - response = session.get(url, auth=HTTPKerberosAuth(), params=params) - - if response.status_code == 400: - # This might be an expected error, so don't log it - logger.debug("AQ Error Response: %s", response.text) - raise AquilonError(response.text) - - if response.status_code != 200: - logger.error("%s: Failed: %s", desc, response.text) - logger.error(url) - raise ConnectionError( - f"Failed {desc}: {response.status_code} -" "{response.text}" - ) 
- - logger.debug("Success: %s ", desc) - logger.debug("AQ Response: %s", response.text) - return response.text - - -def aq_make(addresses: List[OpenstackAddress]) -> None: - """ - Runs AQ make against a list of addresses passed to refresh - the given host - """ - # Manage and make these back to default domain and personality - address = addresses[0] - hostname = address.hostname - logger.debug("Attempting to make templates for %s", hostname) - - if not hostname or not hostname.strip(): - raise ValueError("Hostname cannot be empty") - - url = ConsumerConfig().aq_url + f"/host/{hostname}/command/make" - try: - setup_requests(url, "post", "Make Template") - # suppressing 400 error that occurs - the VM gets created fine - # TODO: find out why this occurs - except AquilonError: - logger.debug("make request failed, continuing") - - -def aq_manage(addresses: List[OpenstackAddress], image_meta: AqMetadata) -> None: - """ - Manages the list of Aquilon addresses passed to it back to the production domain - """ - address = addresses[0] - hostname = address.hostname - logger.debug("Attempting to manage %s", hostname) - - params = { - "hostname": hostname, - "force": True, - } - if image_meta.aq_sandbox: - params["sandbox"] = image_meta.aq_sandbox - else: - params["domain"] = image_meta.aq_domain - - url = ConsumerConfig().aq_url + f"/host/{hostname}/command/manage" - setup_requests(url, "post", "Manage Host", params=params) - - -def create_machine(message: RabbitMessage, vm_data: VmData) -> str: - """ - Creates a machine in Aquilon. Returns the machine name - """ - logger.debug("Attempting to create machine for %s ", vm_data.virtual_machine_id) - - params = { - "model": "vm-openstack", - "serial": vm_data.virtual_machine_id, - "vmhost": message.payload.vm_host, - "cpucount": message.payload.vcpus, - "memory": message.payload.memory_mb, - } - - url = ConsumerConfig().aq_url + f"/next_machine/{ConsumerConfig().aq_prefix}" - response = setup_requests(url, "put", "Create Machine", params=params) - return response - - -def delete_machine(machine_name: str) -> None: - """ - Deletes a machine in Aquilon - """ - logger.debug("Attempting to delete machine for %s", machine_name) - - url = ConsumerConfig().aq_url + DELETE_MACHINE_SUFFIX.format(machine_name) - - setup_requests(url, "delete", "Delete Machine") - - -def create_host( - image_meta: AqMetadata, addresses: List[OpenstackAddress], machine_name: str -) -> None: - """ - Creates a host in Aquilon - """ - config = ConsumerConfig() - - address = addresses[0] - params = { - "machine": machine_name, - "ip": address.addr, - "archetype": image_meta.aq_archetype, - "personality": image_meta.aq_personality, - "osname": image_meta.aq_os, - "osversion": image_meta.aq_os_version, - } - - if image_meta.aq_sandbox: - params["sandbox"] = image_meta.aq_sandbox - else: - params["domain"] = image_meta.aq_domain - - logger.debug("Attempting to create host for %s ", address.hostname) - url = config.aq_url + f"/host/{address.hostname}" - setup_requests(url, "put", "Host Create", params=params) - - -def delete_host(hostname: str) -> None: - """ - Deletes a host in Aquilon - """ - logger.debug("Attempting to delete host for %s ", hostname) - url = ConsumerConfig().aq_url + DELETE_HOST_SUFFIX.format(hostname) - setup_requests(url, "delete", "Host Delete") - - -def delete_address(address: str, machine_name: str) -> None: - """ - Deletes an address in Aquilon - """ - logger.debug("Attempting to delete address for %s ", address) - url = ConsumerConfig().aq_url + 
"/interface_address" - params = {"ip": address, "machine": machine_name, "interface": "eth0"} - setup_requests(url, "delete", "Address Delete", params=params) - - -def delete_interface(machine_name: str) -> None: - """ - Deletes a host interface in Aquilon - """ - logger.debug("Attempting to delete interface for %s ", machine_name) - url = ConsumerConfig().aq_url + "/interface/command/del" - params = {"interface": "eth0", "machine": machine_name} - setup_requests(url, "post", "Interface Delete", params=params) - - -def add_machine_nics(machine_name: str, addresses: List[OpenstackAddress]) -> None: - """ - Adds NICs to a given machine in Aquilon based on the VM addresses - """ - # We only add the first host interface for now - # this avoids having to do a lot of work to figure out - # which interface names we have to use to clean-up - address = addresses[0] - interface_name = "eth0" - - logger.debug( - "Attempting to add interface %s to machine %s ", - interface_name, - machine_name, - ) - url = ( - ConsumerConfig().aq_url + f"/machine/{machine_name}/interface/{interface_name}" - ) - setup_requests( - url, "put", "Add Machine Interface", params={"mac": address.mac_addr} - ) - - -def set_interface_bootable(machine_name: str, interface_name: str) -> None: - """ - Sets a given interface on a machine to be bootable - """ - logger.debug("Attempting to bootable %s ", machine_name) - - url = ConsumerConfig().aq_url + UPDATE_INTERFACE_SUFFIX.format( - machine_name, interface_name - ) - - setup_requests(url, "post", "Update Machine Interface") - - -def search_machine_by_serial(vm_data: VmData) -> Optional[str]: - """ - Searches for a machine in Aquilon based on a serial number - """ - logger.debug("Searching for host with serial %s", vm_data.virtual_machine_id) - url = ConsumerConfig().aq_url + "/find/machine" - params = {"serial": vm_data.virtual_machine_id} - response = setup_requests(url, "get", "Search Host", params=params).strip() - - if response: - return response - return None - - -def search_host_by_machine(machine_name: str) -> Optional[str]: - """ - Searches for a host in Aquilon based on a machine name - """ - logger.debug("Searching for host with machine name %s", machine_name) - url = ConsumerConfig().aq_url + "/find/host" - params = {"machine": machine_name} - response = setup_requests(url, "get", "Search Host", params=params).strip() - - if response: - return response - return None - - -def get_machine_details(machine_name: str) -> str: - """ - Gets a machine's details as a string - """ - logger.debug("Getting machine details for %s", machine_name) - url = ConsumerConfig().aq_url + f"/machine/{machine_name}" - return setup_requests(url, "get", "Get machine details").strip() - - -def check_host_exists(hostname: str) -> bool: - """ - Checks if a host exists in Aquilon - """ - logger.debug("Checking if hostname exists: %s", hostname) - url = ConsumerConfig().aq_url + HOST_CHECK_SUFFIX.format(hostname) - try: - setup_requests(url, "get", "Check Host") - except AquilonError as err: - if f"Host {hostname} not found." 
in str(err): - return False - raise - return True diff --git a/OpenStack-Rabbit-Consumer/rabbit_consumer/aq_metadata.py b/OpenStack-Rabbit-Consumer/rabbit_consumer/aq_metadata.py deleted file mode 100644 index d84004bd..00000000 --- a/OpenStack-Rabbit-Consumer/rabbit_consumer/aq_metadata.py +++ /dev/null @@ -1,84 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright (c) 2023 United Kingdom Research and Innovation -""" -This file defines the class to handle deserialised metadata for -Aquilon -""" -import logging -from dataclasses import dataclass -from typing import Dict, Optional, Union - -from mashumaro import DataClassDictMixin -from mashumaro.config import BaseConfig - -logger = logging.getLogger(__name__) - - -# Case in-sensitive values that are considered invalid -_INVALID_VALUES = ["none", "null", ""] - - -@dataclass -class AqMetadata(DataClassDictMixin): - """ - Deserialised metadata that is set either on an Openstack image - or a VM's metadata - """ - - aq_archetype: str - aq_domain: str - - aq_personality: str - aq_os_version: str - aq_os: str - - aq_sandbox: Optional[str] = None - - # pylint: disable=too-few-public-methods - class Config(BaseConfig): - """ - Sets the aliases for the metadata keys - """ - - aliases = { - "aq_archetype": "AQ_ARCHETYPE", - "aq_domain": "AQ_DOMAIN", - "aq_sandbox": "AQ_SANDBOX", - "aq_personality": "AQ_PERSONALITY", - "aq_os_version": "AQ_OSVERSION", - "aq_os": "AQ_OS", - } - - def override_from_vm_meta(self, vm_meta: Dict[str, str]): - """ - Overrides the values in the metadata with the values from the VM's - metadata if they are present and sane - """ - for attr, alias in self.Config.aliases.items(): - if alias not in vm_meta: - continue - - if not self._is_metadata_val_valid(vm_meta[alias]): - logger.warning( - "Invalid metadata value '%s' found for metadata property '%s', skipping", - vm_meta[alias], - alias, - ) - continue - - setattr(self, attr, vm_meta[alias]) - - @staticmethod - def _is_metadata_val_valid(val: Union[str, None]) -> bool: - """ - Tests if an individual metadata value is sane, i.e. - a str which is not null, or a blocked value. - If this is valid, it returns true - """ - if not val: - return False - - user_val = val.lower().strip() - if user_val in _INVALID_VALUES: - return False - return True diff --git a/OpenStack-Rabbit-Consumer/rabbit_consumer/consumer_config.py b/OpenStack-Rabbit-Consumer/rabbit_consumer/consumer_config.py deleted file mode 100644 index 16e4e2d5..00000000 --- a/OpenStack-Rabbit-Consumer/rabbit_consumer/consumer_config.py +++ /dev/null @@ -1,66 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright (c) 2023 United Kingdom Research and Innovation -""" -This file allows us to set environment variables so that -credentials are not exposed -""" - -import os -from dataclasses import dataclass, field -from functools import partial - - -@dataclass -class _AqFields: - """ - Dataclass for all Aquilon config elements. These are pulled from - environment variables. - """ - - aq_prefix: str = field(default_factory=partial(os.getenv, "AQ_PREFIX")) - aq_url: str = field(default_factory=partial(os.getenv, "AQ_URL")) - - -@dataclass -class _OpenstackFields: - """ - Dataclass for all Openstack config elements. These are pulled from - environment variables. 
- """ - - openstack_auth_url: str = field( - default_factory=partial(os.getenv, "OPENSTACK_AUTH_URL") - ) - openstack_compute_url: str = field( - default_factory=partial(os.getenv, "OPENSTACK_COMPUTE_URL") - ) - openstack_username: str = field( - default_factory=partial(os.getenv, "OPENSTACK_USERNAME") - ) - openstack_password: str = field( - default_factory=partial(os.getenv, "OPENSTACK_PASSWORD") - ) - - -@dataclass -class _RabbitFields: - """ - Dataclass for all RabbitMQ config elements. These are pulled from - environment variables. - """ - - rabbit_hosts: str = field(default_factory=partial(os.getenv, "RABBIT_HOST", None)) - rabbit_port: str = field(default_factory=partial(os.getenv, "RABBIT_PORT", None)) - rabbit_username: str = field( - default_factory=partial(os.getenv, "RABBIT_USERNAME", None) - ) - rabbit_password: str = field( - default_factory=partial(os.getenv, "RABBIT_PASSWORD", None) - ) - - -@dataclass -class ConsumerConfig(_AqFields, _OpenstackFields, _RabbitFields): - """ - Mix-in class for all known config elements - """ diff --git a/OpenStack-Rabbit-Consumer/rabbit_consumer/message_consumer.py b/OpenStack-Rabbit-Consumer/rabbit_consumer/message_consumer.py deleted file mode 100644 index 9aef754e..00000000 --- a/OpenStack-Rabbit-Consumer/rabbit_consumer/message_consumer.py +++ /dev/null @@ -1,311 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright (c) 2023 United Kingdom Research and Innovation -""" -This file manages how rabbit messages stating AQ VM creation and deletion -should be handled and processed between the consumer and Aquilon -""" -import json -import logging -import socket -from typing import Optional, List - -import rabbitpy - -from rabbit_consumer import aq_api -from rabbit_consumer import openstack_api -from rabbit_consumer.aq_api import verify_kerberos_ticket -from rabbit_consumer.consumer_config import ConsumerConfig -from rabbit_consumer.aq_metadata import AqMetadata -from rabbit_consumer.openstack_address import OpenstackAddress -from rabbit_consumer.rabbit_message import RabbitMessage, MessageEventType -from rabbit_consumer.vm_data import VmData - -logger = logging.getLogger(__name__) -SUPPORTED_MESSAGE_TYPES = { - "create": "compute.instance.create.end", - "delete": "compute.instance.delete.start", -} - - -def is_aq_managed_image(vm_data: VmData) -> bool: - """ - Check to see if the metadata in the message contains entries that suggest it - is for an Aquilon VM. - """ - image = openstack_api.get_image(vm_data) - if not image: - logger.info("No image found for %s", vm_data.virtual_machine_id) - return False - - if "AQ_OS" not in image.metadata: - logger.debug("Skipping non-Aquilon image: %s", image.name) - return False - return True - - -def get_aq_build_metadata(vm_data: VmData) -> AqMetadata: - """ - Gets the Aq Metadata from either the image or VM (where - VM metadata takes precedence) to determine the AQ params - """ - image = openstack_api.get_image(vm_data) - image_meta = AqMetadata.from_dict(image.metadata) - - vm_metadata = openstack_api.get_server_metadata(vm_data) - image_meta.override_from_vm_meta(vm_metadata) - return image_meta - - -def consume(message: RabbitMessage) -> None: - """ - Consumes a message from the rabbit queue and calls the appropriate - handler based on the event type. 
- """ - if message.event_type == SUPPORTED_MESSAGE_TYPES["create"]: - handle_create_machine(message) - - elif message.event_type == SUPPORTED_MESSAGE_TYPES["delete"]: - handle_machine_delete(message) - - else: - raise ValueError(f"Unsupported message type: {message.event_type}") - - -def delete_machine( - vm_data: VmData, network_details: Optional[OpenstackAddress] = None -) -> None: - """ - Deletes a machine in Aquilon and all associated addresses based on - the serial, MAC and hostname provided. This is the best effort attempt - to clean-up, since we can have partial or incorrect information. - """ - # First handle hostnames - if network_details and aq_api.check_host_exists(network_details.hostname): - logger.info("Deleting host %s", network_details.hostname) - aq_api.delete_host(network_details.hostname) - - machine_name = aq_api.search_machine_by_serial(vm_data) - if not machine_name: - logger.info("No existing record found for %s", vm_data.virtual_machine_id) - return - - # We have to do this manually because AQ has neither a: - # - Just delete the machine please - # - Delete this if it exists - # So alas we have to do everything by hand, whilst adhering to random rules - # of deletion orders which it enforces... - - hostname = aq_api.search_host_by_machine(machine_name) - machine_details = aq_api.get_machine_details(machine_name) - - # We have to clean-up all the interfaces and addresses first - # we could have a machine which points to a different hostname - if hostname: - if aq_api.check_host_exists(hostname): - # This is a different hostname to the one we have in the message - # so, we need to delete it - logger.info("Host exists for %s. Deleting old", hostname) - aq_api.delete_host(hostname) - else: - # Delete the interfaces - ipv4_address = socket.gethostbyname(hostname) - if ipv4_address in machine_details: - aq_api.delete_address(ipv4_address, machine_name) - - if "eth0" in machine_details: - aq_api.delete_interface(machine_name) - - logger.info("Machine exists for %s. Deleting old", vm_data.virtual_machine_id) - - # Then delete the machine - aq_api.delete_machine(machine_name) - - -def check_machine_valid(rabbit_message: RabbitMessage) -> bool: - """ - Checks to see if the machine is valid for creating in Aquilon. - """ - vm_data = VmData.from_message(rabbit_message) - if not openstack_api.check_machine_exists(vm_data): - # User has likely deleted the machine since we got here - logger.warning( - "Machine %s does not exist, skipping creation", vm_data.virtual_machine_id - ) - return False - - if not is_aq_managed_image(vm_data): - logger.debug("Ignoring non AQ Image: %s", rabbit_message) - return False - - return True - - -def handle_create_machine(rabbit_message: RabbitMessage) -> None: - """ - Handles the creation of a machine in Aquilon. This includes - creating the machine, adding the nics, and managing the host. 
- """ - logger.info("=== Received Aquilon VM create message ===") - _print_debug_logging(rabbit_message) - - if not check_machine_valid(rabbit_message): - return - - vm_data = VmData.from_message(rabbit_message) - - image_meta = get_aq_build_metadata(vm_data) - network_details = openstack_api.get_server_networks(vm_data) - - if not network_details or not network_details[0].hostname: - vm_name = rabbit_message.payload.vm_name - logger.info("Skipping novalocal only host: %s", vm_name) - return - - logger.info("Clearing any existing records from Aquilon") - delete_machine(vm_data, network_details[0]) - - # Configure networking - machine_name = aq_api.create_machine(rabbit_message, vm_data) - aq_api.add_machine_nics(machine_name, network_details) - aq_api.set_interface_bootable(machine_name, "eth0") - - # Manage host in Aquilon - aq_api.create_host(image_meta, network_details, machine_name) - aq_api.aq_make(network_details) - - add_aq_details_to_metadata(vm_data, network_details) - - logger.info( - "=== Finished Aquilon creation hook for VM %s ===", vm_data.virtual_machine_id - ) - - -def _print_debug_logging(rabbit_message: RabbitMessage) -> None: - """ - Prints debug logging for the Aquilon message. - """ - vm_data = VmData.from_message(rabbit_message) - logger.debug( - "Project Name: %s (%s)", rabbit_message.project_name, vm_data.project_id - ) - logger.info( - "VM Name: %s (%s) ", rabbit_message.payload.vm_name, vm_data.virtual_machine_id - ) - logger.debug("Username: %s", rabbit_message.user_name) - - -def handle_machine_delete(rabbit_message: RabbitMessage) -> None: - """ - Handles the deletion of a machine in Aquilon. This includes - deleting the machine and the host. - """ - logger.info("=== Received Aquilon VM delete message ===") - _print_debug_logging(rabbit_message) - - vm_data = VmData.from_message(rabbit_message) - delete_machine(vm_data=vm_data) - - logger.info( - "=== Finished Aquilon deletion hook for VM %s ===", vm_data.virtual_machine_id - ) - - -def add_aq_details_to_metadata( - vm_data: VmData, network_details: List[OpenstackAddress] -) -> None: - """ - Adds the hostname to the metadata of the VM. - """ - if not openstack_api.check_machine_exists(vm_data): - # User has likely deleted the machine since we got here - logger.warning( - "Machine %s does not exist, skipping metadata update", - vm_data.virtual_machine_id, - ) - return - - hostnames = [i.hostname for i in network_details] - metadata = { - "HOSTNAMES": ",".join(hostnames), - "AQ_STATUS": "SUCCESS", - "AQ_MACHINE": aq_api.search_machine_by_serial(vm_data), - } - openstack_api.update_metadata(vm_data, metadata) - - -def on_message(message: rabbitpy.Message) -> None: - """ - Deserializes the message and calls the consume function on message. - """ - raw_body = message.body - logger.debug("New message: %s", raw_body) - - body = json.loads(raw_body.decode("utf-8"))["oslo.message"] - parsed_event = MessageEventType.from_json(body) - if parsed_event.event_type not in SUPPORTED_MESSAGE_TYPES.values(): - logger.info("Ignoring event_type: %s", parsed_event.event_type) - message.ack() - return - - decoded = RabbitMessage.from_json(body) - logger.debug("Decoded message: %s", decoded) - - consume(decoded) - message.ack() - - -def generate_login_str(config: ConsumerConfig) -> str: - """ - Generates the login string for the rabbit connection. 
- """ - if not config.rabbit_hosts: - raise ValueError("No rabbit hosts provided") - - if not isinstance(config.rabbit_hosts, str): - raise ValueError("Rabbit hosts must be a comma separated string of hosts") - - debug_str = "amqp://" - connect_str = "amqp://" - - for host in config.rabbit_hosts.split(","): - host = host.strip() - connect_str += f"{config.rabbit_username}:{config.rabbit_password}@{host}:{config.rabbit_port}," - debug_str += f"{config.rabbit_username}:@{host}:{config.rabbit_port}," - - # Trim the trailing comma - connect_str = connect_str[:-1] - debug_str = debug_str[:-1] - - logger.debug("Connecting to rabbit with: %s", debug_str) - - return connect_str - - -def initiate_consumer() -> None: - """ - Initiates the message consumer and starts consuming messages in a loop. - This includes setting up the rabbit connection and channel. - """ - logger.debug("Initiating message consumer") - # Ensure we have valid creds before trying to contact rabbit - verify_kerberos_ticket() - - exchanges = ["nova"] - - config = ConsumerConfig() - login_str = generate_login_str(config) - with rabbitpy.Connection(login_str) as conn: - with conn.channel() as channel: - logger.debug("Connected to RabbitMQ") - - # Durable indicates that the queue will survive a broker restart - queue = rabbitpy.Queue(channel, name="ral.info", durable=True) - for exchange in exchanges: - logger.debug("Binding to exchange: %s", exchange) - queue.bind(exchange, routing_key="ral.info") - - # Consume the messages from generator - message: rabbitpy.Message - logger.debug("Starting to consume messages") - for message in queue: - on_message(message) diff --git a/OpenStack-Rabbit-Consumer/rabbit_consumer/openstack_address.py b/OpenStack-Rabbit-Consumer/rabbit_consumer/openstack_address.py deleted file mode 100644 index c784c4ed..00000000 --- a/OpenStack-Rabbit-Consumer/rabbit_consumer/openstack_address.py +++ /dev/null @@ -1,70 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright (c) 2023 United Kingdom Research and Innovation -""" -This file deserializes a server's network address from an -OpenStack API response -""" -import logging -import socket -from dataclasses import dataclass, field -from typing import Dict, Optional - -from mashumaro import DataClassDictMixin, field_options - -logger = logging.getLogger(__name__) - - -@dataclass -class OpenstackAddress(DataClassDictMixin): - """ - Deserializes the Openstack API response for a server's - network addresses. This is expected to be called from the - OpenstackAPI. To get an actual list use the Openstack API. - """ - - version: int - addr: str - mac_addr: str = field(metadata=field_options(alias="OS-EXT-IPS-MAC:mac_addr")) - hostname: Optional[str] = None - - @staticmethod - def get_internal_networks(addresses: Dict) -> list["OpenstackAddress"]: - """ - Returns a list of internal network addresses. This - is expected to be called from the OpenstackAPI. To get an actual - list use the Openstack API wrapper directly. - """ - internal_networks = [] - for address in addresses["Internal"]: - found = OpenstackAddress.from_dict(address) - found.hostname = OpenstackAddress.convert_hostnames(found.addr) - internal_networks.append(found) - return internal_networks - - @staticmethod - def get_services_networks(addresses: Dict) -> list["OpenstackAddress"]: - """ - Returns a list of network addresses on the services subnet. This - is expected to be called from the OpenstackAPI. To get an actual - list use the Openstack API wrapper directly. 
- """ - services_networks = [] - for address in addresses["Services"]: - found = OpenstackAddress.from_dict(address) - found.hostname = OpenstackAddress.convert_hostnames(found.addr) - services_networks.append(found) - return services_networks - - @staticmethod - def convert_hostnames(ip_addr: str) -> str: - """ - Converts an ip address to a hostname using DNS lookup. - """ - try: - return socket.gethostbyaddr(ip_addr)[0] - except socket.herror: - logger.info("No hostname found for ip %s", ip_addr) - raise - except Exception: - logger.error("Problem converting ip to hostname") - raise diff --git a/OpenStack-Rabbit-Consumer/rabbit_consumer/openstack_api.py b/OpenStack-Rabbit-Consumer/rabbit_consumer/openstack_api.py deleted file mode 100644 index 3c6119c3..00000000 --- a/OpenStack-Rabbit-Consumer/rabbit_consumer/openstack_api.py +++ /dev/null @@ -1,112 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright (c) 2023 United Kingdom Research and Innovation -""" -This file defines methods for connecting and interacting with the -OpenStack API -""" -import logging -from typing import List, Optional - -import openstack -from openstack.compute.v2.image import Image -from openstack.compute.v2.server import Server - -from rabbit_consumer.consumer_config import ConsumerConfig -from rabbit_consumer.openstack_address import OpenstackAddress -from rabbit_consumer.vm_data import VmData - -logger = logging.getLogger(__name__) - - -class OpenstackConnection: - """ - Wrapper for Openstack connection, to reduce boilerplate code - in subsequent functions. - """ - - def __init__(self): - self.conn = None - - def __enter__(self): - self.conn = openstack.connect( - auth_url=ConsumerConfig().openstack_auth_url, - username=ConsumerConfig().openstack_username, - password=ConsumerConfig().openstack_password, - project_name="admin", - user_domain_name="Default", - project_domain_name="default", - ) - return self.conn - - def __exit__(self, exc_type, exc_val, exc_tb): - self.conn.close() - - -def check_machine_exists(vm_data: VmData) -> bool: - """ - Checks to see if the machine exists in Openstack. - """ - with OpenstackConnection() as conn: - return bool(conn.compute.find_server(vm_data.virtual_machine_id)) - - -def get_server_details(vm_data: VmData) -> Server: - """ - Gets the server details from Openstack with details included - """ - with OpenstackConnection() as conn: - # Workaround for details missing from find_server - # on the current version of openstacksdk - found = list( - conn.compute.servers(uuid=vm_data.virtual_machine_id, all_projects=True) - ) - if not found: - raise ValueError(f"Server not found for id: {vm_data.virtual_machine_id}") - return found[0] - - -def get_server_networks(vm_data: VmData) -> List[OpenstackAddress]: - """ - Gets the networks from Openstack for the virtual machine as a list - of deserialized OpenstackAddresses. - """ - server = get_server_details(vm_data) - if "Internal" in server.addresses: - return OpenstackAddress.get_internal_networks(server.addresses) - if "Services" in server.addresses: - return OpenstackAddress.get_services_networks(server.addresses) - logger.warning("No internal or services network found for server %s", server.name) - return [] - - -def get_server_metadata(vm_data: VmData) -> dict: - """ - Gets the metadata from Openstack for the virtual machine. - """ - server = get_server_details(vm_data) - return server.metadata - - -def get_image(vm_data: VmData) -> Optional[Image]: - """ - Gets the image name from Openstack for the virtual machine. 
- """ - server = get_server_details(vm_data) - uuid = server.image.id - if not uuid: - return None - - with OpenstackConnection() as conn: - image = conn.compute.find_image(uuid) - return image - - -def update_metadata(vm_data: VmData, metadata) -> None: - """ - Updates the metadata for the virtual machine. - """ - server = get_server_details(vm_data) - with OpenstackConnection() as conn: - conn.compute.set_server_metadata(server, **metadata) - - logger.debug("Setting metadata successful") diff --git a/OpenStack-Rabbit-Consumer/rabbit_consumer/rabbit_message.py b/OpenStack-Rabbit-Consumer/rabbit_consumer/rabbit_message.py deleted file mode 100644 index 384fbbce..00000000 --- a/OpenStack-Rabbit-Consumer/rabbit_consumer/rabbit_message.py +++ /dev/null @@ -1,60 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright (c) 2023 United Kingdom Research and Innovation -""" -This file handles how messages from Rabbit are processed and the -message extracted -""" -from dataclasses import dataclass, field -from typing import Optional - -from mashumaro import field_options -from mashumaro.mixins.json import DataClassJSONMixin - - -@dataclass -class MessageEventType(DataClassJSONMixin): - """ - Parses a raw message from RabbitMQ to determine the event_type - """ - - event_type: str - - -@dataclass -class RabbitMeta(DataClassJSONMixin): - """ - Deserialised custom VM metadata - """ - - machine_name: Optional[str] = field( - metadata=field_options(alias="AQ_MACHINENAME"), default=None - ) - - -@dataclass -# pylint: disable=too-many-instance-attributes -class RabbitPayload(DataClassJSONMixin): - """ - Deserialises the payload of a RabbitMQ message - """ - - instance_id: str - vm_name: str = field(metadata=field_options(alias="display_name")) - vcpus: int - memory_mb: int - vm_host: str = field(metadata=field_options(alias="host")) - - metadata: RabbitMeta - - -@dataclass -class RabbitMessage(DataClassJSONMixin): - """ - Deserialised RabbitMQ message - """ - - event_type: str - project_name: str = field(metadata=field_options(alias="_context_project_name")) - project_id: str = field(metadata=field_options(alias="_context_project_id")) - user_name: str = field(metadata=field_options(alias="_context_user_name")) - payload: RabbitPayload diff --git a/OpenStack-Rabbit-Consumer/rabbit_consumer/vm_data.py b/OpenStack-Rabbit-Consumer/rabbit_consumer/vm_data.py deleted file mode 100644 index b4e6820d..00000000 --- a/OpenStack-Rabbit-Consumer/rabbit_consumer/vm_data.py +++ /dev/null @@ -1,28 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright (c) 2023 United Kingdom Research and Innovation -""" -This file has a dataclass for creating VM data objects from messages -""" -from dataclasses import dataclass - -from rabbit_consumer.rabbit_message import RabbitMessage - - -@dataclass -class VmData: - """ - Holds fields that change between different virtual machines - """ - - project_id: str - virtual_machine_id: str - - @staticmethod - def from_message(message: RabbitMessage) -> "VmData": - """ - Creates a VmData object from a RabbitMessage - """ - return VmData( - project_id=message.project_id, - virtual_machine_id=message.payload.instance_id, - ) diff --git a/OpenStack-Rabbit-Consumer/readme b/OpenStack-Rabbit-Consumer/readme deleted file mode 100644 index 4e7a28bd..00000000 --- a/OpenStack-Rabbit-Consumer/readme +++ /dev/null @@ -1,46 +0,0 @@ -Openstack Rabbit Consumers ---------------------------- - -The script will monitor the rabbit consumers, and automatically register machines -with the configuration 
management tool.
-
-This container assumes that a sidecar container is running to handle krb5 machine authentication.
-
-Release
--------
-
-Pull requests will push a tagged image (with the commit sha) to
-harbor.stfc.ac.uk/stfc-cloud-staging/openstack-rabbit-consumer:sha
-
-(Where the SHA can be found in the GH actions build logs)
-
-To release a new version, update version.txt with the new version number.
-When the PR is merged, a new image will be pushed to harbor.stfc.ac.uk/stfc-cloud-staging/openstack-rabbit-consumer
-
-You may need to update the version in the helm chart to match the new version.
-
-Testing Locally
-===============
-
-Initial setup
--------------
-
-- Spin up minikube locally
-- Install the secrets, as per the instructions in the chart
-- Make docker use the minikube docker daemon in your current shell:
-`eval $(minikube docker-env)`
-
-Testing
--------
-
-- Build the docker image locally:
-`docker build -t rabbit-consumer:1 .`
-- cd to the chart directory:
-`cd ../charts/rabbit-consumer`
-- Install/Upgrade the chart with your changes:
-`helm install rabbit-consumers . -f values.yaml -f dev-values.yaml -n rabbit-consumers`
-- To deploy a new image, rebuild it and upgrade the release:
-`docker build -t rabbit-consumer:n . && helm upgrade rabbit-consumers . -f values.yaml -f dev-values.yaml -n rabbit-consumers`
-- Logs can be found with:
-`kubectl logs deploy/rabbit-consumers -n rabbit-consumers`
-
diff --git a/OpenStack-Rabbit-Consumer/requirements-test.txt b/OpenStack-Rabbit-Consumer/requirements-test.txt
deleted file mode 100644
index c74b26f9..00000000
--- a/OpenStack-Rabbit-Consumer/requirements-test.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-coverage
-pylint
-pytest
\ No newline at end of file
diff --git a/OpenStack-Rabbit-Consumer/requirements.txt b/OpenStack-Rabbit-Consumer/requirements.txt
deleted file mode 100644
index 7ed9bc5e..00000000
--- a/OpenStack-Rabbit-Consumer/requirements.txt
+++ /dev/null
@@ -1,8 +0,0 @@
-rabbitpy
-requests
-requests_kerberos
-pika
-urllib3
-mashumaro
-openstacksdk
-six # for openstacksdk
diff --git a/OpenStack-Rabbit-Consumer/tests/__init__.py b/OpenStack-Rabbit-Consumer/tests/__init__.py
deleted file mode 100644
index e69de29b..00000000
diff --git a/OpenStack-Rabbit-Consumer/tests/conftest.py b/OpenStack-Rabbit-Consumer/tests/conftest.py
deleted file mode 100644
index 3d8098cb..00000000
--- a/OpenStack-Rabbit-Consumer/tests/conftest.py
+++ /dev/null
@@ -1,87 +0,0 @@
-# SPDX-License-Identifier: Apache-2.0
-# Copyright (c) 2023 United Kingdom Research and Innovation
-"""
-Fixtures for unit tests, used to create mock objects
-"""
-import uuid
-
-import pytest
-
-from rabbit_consumer.aq_metadata import AqMetadata
-from rabbit_consumer.openstack_address import OpenstackAddress
-from rabbit_consumer.rabbit_message import RabbitMessage, RabbitMeta, RabbitPayload
-from rabbit_consumer.vm_data import VmData
-
-
-@pytest.fixture(name="image_metadata")
-def fixture_image_metadata():
-    """
-    Creates an AqMetadata object with mock data
-    which represents an example OpenStack image
-    """
-    return AqMetadata(
-        aq_archetype="archetype_mock",
-        aq_domain="domain_mock",
-        aq_personality="personality_mock",
-        aq_os="os_mock",
-        aq_os_version="osversion_mock",
-    )
-
-
-@pytest.fixture(name="rabbit_message")
-def fixture_rabbit_message():
-    """
-    Creates a RabbitMessage object with mock data
-    """
-    rabbit_payload = RabbitPayload(
-        instance_id="instance_id_mock",
-        memory_mb=1024,
-        metadata=RabbitMeta(),
-        vcpus=2,
-        vm_host="vm_host_mock",
vm_name="vm_name_mock", - ) - - return RabbitMessage( - event_type="event_type_mock", - payload=rabbit_payload, - project_id="project_id_mock", - project_name="project_name_mock", - user_name="user_name_mock", - ) - - -@pytest.fixture(name="vm_data") -def fixture_vm_data(): - """ - Creates a VmData object with mock data - """ - return VmData( - project_id="project_id_mock", virtual_machine_id="virtual_machine_id_mock" - ) - - -@pytest.fixture(name="openstack_address") -def fixture_openstack_address(): - """ - Creates an OpenstackAddress object with mock data - """ - return OpenstackAddress( - addr="127.0.0.123", - mac_addr="00:00:00:00:00:00", - version=4, - hostname=str(uuid.uuid4()), - ) - - -@pytest.fixture(name="openstack_address_list") -def fixture_openstack_address_list(openstack_address): - """ - Creates a list of OpenstackAddress objects with mock data - """ - addresses = [openstack_address, openstack_address] - for i in addresses: - # Set a unique hostname for each address, otherwise the fixture - # will return the same object twice - i.hostname = str(uuid.uuid4()) - return addresses diff --git a/OpenStack-Rabbit-Consumer/tests/test_aq_api.py b/OpenStack-Rabbit-Consumer/tests/test_aq_api.py deleted file mode 100644 index 4c29ea84..00000000 --- a/OpenStack-Rabbit-Consumer/tests/test_aq_api.py +++ /dev/null @@ -1,474 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright (c) 2023 United Kingdom Research and Innovation -""" -Tests that we perform the correct REST requests against -the Aquilon API -""" -from unittest import mock -from unittest.mock import patch, call, NonCallableMock - -import pytest - -# noinspection PyUnresolvedReferences -from rabbit_consumer.aq_api import ( - verify_kerberos_ticket, - setup_requests, - aq_make, - aq_manage, - create_machine, - delete_machine, - create_host, - delete_host, - set_interface_bootable, - check_host_exists, - AquilonError, - add_machine_nics, - search_machine_by_serial, - search_host_by_machine, -) - - -def test_verify_kerberos_ticket_valid(): - """ - Test that verify_kerberos_ticket returns True when the ticket is valid - """ - with patch("rabbit_consumer.aq_api.subprocess.call") as mocked_call: - # Exit code 0 - i.e. valid ticket - mocked_call.return_value = 0 - assert verify_kerberos_ticket() - mocked_call.assert_called_once_with(["klist", "-s"]) - - -@patch("rabbit_consumer.aq_api.subprocess.call") -def test_verify_kerberos_ticket_invalid(subprocess): - """ - Test that verify_kerberos_ticket raises an exception when the ticket is invalid - """ - # Exit code 1 - i.e. 
invalid ticket - # Then 0 (kinit), 0 (klist -s) - subprocess.side_effect = [1] - - with pytest.raises(RuntimeError): - verify_kerberos_ticket() - - subprocess.assert_called_once_with(["klist", "-s"]) - - -@patch("rabbit_consumer.aq_api.requests") -@patch("rabbit_consumer.aq_api.Retry") -@patch("rabbit_consumer.aq_api.HTTPAdapter") -@patch("rabbit_consumer.aq_api.verify_kerberos_ticket") -def test_setup_requests(verify_kerb, adapter, retry, requests): - """ - Test that setup_requests sets up the Kerberos ticket and the requests session - correctly - """ - session = requests.Session.return_value - response = session.get.return_value - response.status_code = 200 - - setup_requests(NonCallableMock(), NonCallableMock(), NonCallableMock()) - assert ( - session.verify - == "/etc/grid-security/certificates/aquilon-gridpp-rl-ac-uk-chain.pem" - ) - - verify_kerb.assert_called_once() - retry.assert_called_once_with(total=5, backoff_factor=0.1, status_forcelist=[503]) - adapter.assert_called_once_with(max_retries=retry.return_value) - session.mount.assert_called_once_with("https://", adapter.return_value) - - -@patch("rabbit_consumer.aq_api.requests") -@patch("rabbit_consumer.aq_api.Retry") -@patch("rabbit_consumer.aq_api.HTTPAdapter") -@patch("rabbit_consumer.aq_api.verify_kerberos_ticket") -def test_setup_requests_throws_for_failed(verify_kerb, adapter, retry, requests): - """ - Test that setup_requests throws an exception when the connection fails - """ - session = requests.Session.return_value - response = session.get.return_value - response.status_code = 500 - - with pytest.raises(ConnectionError): - setup_requests(NonCallableMock(), NonCallableMock(), NonCallableMock()) - - assert ( - session.verify - == "/etc/grid-security/certificates/aquilon-gridpp-rl-ac-uk-chain.pem" - ) - - verify_kerb.assert_called_once() - retry.assert_called_once_with(total=5, backoff_factor=0.1, status_forcelist=[503]) - adapter.assert_called_once_with(max_retries=retry.return_value) - session.mount.assert_called_once_with("https://", adapter.return_value) - session.get.assert_called_once() - - -@pytest.mark.parametrize("rest_verb", ["get", "post", "put", "delete"]) -@patch("rabbit_consumer.aq_api.requests") -@patch("rabbit_consumer.aq_api.HTTPKerberosAuth") -@patch("rabbit_consumer.aq_api.verify_kerberos_ticket") -def test_setup_requests_rest_methods(_, kerb_auth, requests, rest_verb): - """ - Test that setup_requests calls the correct REST method - """ - url, desc, params = NonCallableMock(), NonCallableMock(), NonCallableMock() - - session = requests.Session.return_value - - rest_method = getattr(session, rest_verb) - response = rest_method.return_value - response.status_code = 200 - - assert setup_requests(url, rest_verb, desc, params) == response.text - rest_method.assert_called_once_with(url, auth=kerb_auth.return_value, params=params) - - -@patch("rabbit_consumer.aq_api.setup_requests") -@patch("rabbit_consumer.aq_api.ConsumerConfig") -def test_aq_make_calls(config, setup, openstack_address_list): - """ - Test that aq_make calls the correct URLs with the correct parameters - """ - domain = "domain" - config.return_value.aq_url = domain - - aq_make(openstack_address_list) - - expected_url = f"{domain}/host/{openstack_address_list[0].hostname}/command/make" - setup.assert_called_once_with(expected_url, "post", mock.ANY) - - -@patch("rabbit_consumer.aq_api.setup_requests") -@patch("rabbit_consumer.aq_api.ConsumerConfig") -def test_aq_make_aquilon_error(config, setup, openstack_address_list): - """ - Test that 
aq_make doesn't fail when aquilon error raised - """ - domain = "domain" - config.return_value.aq_url = domain - setup.side_effect = AquilonError() - - # pylint:disable=bare-except - try: - aq_make(openstack_address_list) - except: - assert False, "exception was raised when it should have been ignored" - - expected_url = f"{domain}/host/{openstack_address_list[0].hostname}/command/make" - setup.assert_called_once_with(expected_url, "post", mock.ANY) - - -@pytest.mark.parametrize("hostname", [" ", "", None]) -@patch("rabbit_consumer.aq_api.setup_requests") -@patch("rabbit_consumer.aq_api.ConsumerConfig") -def test_aq_make_none_hostname(config, setup, openstack_address, hostname): - """ - Test that aq_make throws an exception if the field is missing - """ - domain = "https://example.com" - config.return_value.aq_url = domain - - address = openstack_address - address.hostname = hostname - - with pytest.raises(ValueError): - aq_make([address]) - - setup.assert_not_called() - - -@patch("rabbit_consumer.aq_api.setup_requests") -@patch("rabbit_consumer.aq_api.ConsumerConfig") -def test_aq_manage(config, setup, openstack_address_list, image_metadata): - """ - Test that aq_manage calls the correct URLs with the correct parameters - """ - config.return_value.aq_url = "https://example.com" - - aq_manage(openstack_address_list, image_metadata) - address = openstack_address_list[0] - - expected_param = { - "hostname": address.hostname, - "domain": image_metadata.aq_domain, - "force": True, - } - - expected_url = f"https://example.com/host/{address.hostname}/command/manage" - setup.assert_called_once_with(expected_url, "post", mock.ANY, params=expected_param) - - -@patch("rabbit_consumer.aq_api.setup_requests") -@patch("rabbit_consumer.aq_api.ConsumerConfig") -def test_aq_manage_with_sandbox(config, setup, openstack_address_list, image_metadata): - """ - Test that aq_manage calls the correct URLs with the sandbox - instead of the domain - """ - config.return_value.aq_url = "https://example.com" - - image_metadata.aq_sandbox = "some_sandbox" - - aq_manage(openstack_address_list, image_metadata) - address = openstack_address_list[0] - - expected_param = { - "hostname": address.hostname, - "sandbox": image_metadata.aq_sandbox, - "force": True, - } - - expected_url = f"https://example.com/host/{address.hostname}/command/manage" - setup.assert_called_once_with(expected_url, "post", mock.ANY, params=expected_param) - - -@patch("rabbit_consumer.aq_api.ConsumerConfig") -@patch("rabbit_consumer.aq_api.setup_requests") -def test_aq_create_machine(setup, config, rabbit_message, vm_data): - """ - Test that aq_create_machine calls the correct URL with the correct parameters - """ - config.return_value.aq_url = "https://example.com" - config.return_value.aq_prefix = "prefix_mock" - - returned = create_machine(rabbit_message, vm_data) - - expected_args = { - "model": "vm-openstack", - "serial": vm_data.virtual_machine_id, - "vmhost": rabbit_message.payload.vm_host, - "cpucount": rabbit_message.payload.vcpus, - "memory": rabbit_message.payload.memory_mb, - } - - expected_url = "https://example.com/next_machine/prefix_mock" - assert setup.call_args == call(expected_url, "put", mock.ANY, params=expected_args) - assert returned == setup.return_value - - -@patch("rabbit_consumer.aq_api.setup_requests") -@patch("rabbit_consumer.aq_api.ConsumerConfig") -def test_aq_delete_machine(config, setup): - """ - Test that aq_delete_machine calls the correct URL with the correct parameters - """ - machine_name = "name_mock" - - 
config.return_value.aq_url = "https://example.com" - delete_machine(machine_name) - - setup.assert_called_once() - expected_url = "https://example.com/machine/name_mock" - assert setup.call_args == call(expected_url, "delete", mock.ANY) - - -@patch("rabbit_consumer.aq_api.setup_requests") -@patch("rabbit_consumer.aq_api.ConsumerConfig") -def test_aq_create_host(config, setup, openstack_address_list, image_metadata): - """ - Test that aq_create_host calls the correct URL with the correct parameters - """ - machine_name = "machine_name_str" - - env_config = config.return_value - env_config.aq_url = "https://example.com" - - create_host(image_metadata, openstack_address_list, machine_name) - address = openstack_address_list[0] - - expected_params = { - "machine": machine_name, - "ip": address.addr, - "archetype": image_metadata.aq_archetype, - "domain": image_metadata.aq_domain, - "personality": image_metadata.aq_personality, - "osname": image_metadata.aq_os, - "osversion": image_metadata.aq_os_version, - } - - expected_url = f"https://example.com/host/{address.hostname}" - setup.assert_called_once_with(expected_url, "put", mock.ANY, params=expected_params) - - -@patch("rabbit_consumer.aq_api.setup_requests") -@patch("rabbit_consumer.aq_api.ConsumerConfig") -def test_aq_create_host_with_sandbox( - config, setup, openstack_address_list, image_metadata -): - """ - Test that aq_create_host calls the correct URL with the correct parameters - """ - machine_name = "machine_name_str" - - env_config = config.return_value - env_config.aq_url = "https://example.com" - - image_metadata.aq_domain = "example_domain" - image_metadata.aq_sandbox = "example/sandbox" - - create_host(image_metadata, openstack_address_list, machine_name) - address = openstack_address_list[0] - - expected_params = { - "machine": machine_name, - "ip": address.addr, - "archetype": image_metadata.aq_archetype, - "personality": image_metadata.aq_personality, - "osname": image_metadata.aq_os, - "osversion": image_metadata.aq_os_version, - "sandbox": image_metadata.aq_sandbox, - } - - expected_url = f"https://example.com/host/{address.hostname}" - setup.assert_called_once_with(expected_url, "put", mock.ANY, params=expected_params) - - -@patch("rabbit_consumer.aq_api.setup_requests") -@patch("rabbit_consumer.aq_api.ConsumerConfig") -def test_aq_delete_host(config, setup): - """ - Test that aq_delete_host calls the correct URL with the correct parameters - """ - machine_name = "name_mock" - - config.return_value.aq_url = "https://example.com" - delete_host(machine_name) - - setup.assert_called_once() - expected_url = "https://example.com/host/name_mock" - assert setup.call_args == call(expected_url, "delete", mock.ANY) - - -@patch("rabbit_consumer.aq_api.setup_requests") -@patch("rabbit_consumer.aq_api.ConsumerConfig") -def test_add_machine_nic(config, setup, openstack_address_list): - """ - Test that add_machine_interface calls the correct URL with the correct parameters - """ - config.return_value.aq_url = "https://example.com" - - machine_name = "name_str" - add_machine_nics(machine_name, openstack_address_list) - - iface_creation_url = f"https://example.com/machine/{machine_name}/interface/eth0" - - setup.assert_called_once_with( - iface_creation_url, - "put", - mock.ANY, - params={"mac": openstack_address_list[0].mac_addr}, - ) - - -@patch("rabbit_consumer.aq_api.setup_requests") -@patch("rabbit_consumer.aq_api.ConsumerConfig") -def test_update_machine_interface(config, setup): - """ - Test that update_machine_interface calls the 
correct URL with the correct parameters - """ - machine_name = "machine_str" - interface_name = "iface_name" - - config.return_value.aq_url = "https://example.com" - set_interface_bootable(machine_name=machine_name, interface_name=interface_name) - - setup.assert_called_once() - expected_url = "https://example.com/machine/machine_str/interface/iface_name?boot&default_route" - assert setup.call_args == call(expected_url, "post", mock.ANY) - - -@patch("rabbit_consumer.aq_api.setup_requests") -@patch("rabbit_consumer.aq_api.ConsumerConfig") -def test_check_host_exists(config, setup): - """ - Test that check_host_exists calls the correct URL with the correct parameters - and detects the host exists based on the response - """ - hostname = "host_str" - - config.return_value.aq_url = "https://example.com" - assert check_host_exists(hostname) - - expected_url = f"https://example.com/host/{hostname}" - setup.assert_called_once_with(expected_url, "get", mock.ANY) - - -@patch("rabbit_consumer.aq_api.setup_requests") -@patch("rabbit_consumer.aq_api.ConsumerConfig") -def test_check_host_exists_returns_false(config, setup): - """ - Test that check_host_exists calls the correct URL with the correct parameters - and detects the host does not exist based on the response - """ - hostname = "host_str" - config.return_value.aq_url = "https://example.com" - setup.side_effect = AquilonError(f"Error:\n Host {hostname} not found.") - - assert not check_host_exists(hostname) - - -@patch("rabbit_consumer.aq_api.setup_requests") -@patch("rabbit_consumer.aq_api.ConsumerConfig") -def test_search_machine_by_serial(config, setup, vm_data): - """ - Test that search_machine_by_serial calls the correct URL with the correct parameters - """ - config.return_value.aq_url = "https://example.com" - response = search_machine_by_serial(vm_data) - - expected_url = "https://example.com/find/machine" - expected_args = {"serial": vm_data.virtual_machine_id} - setup.assert_called_once_with(expected_url, "get", mock.ANY, params=expected_args) - assert response == setup.return_value.strip.return_value - - -@patch("rabbit_consumer.aq_api.setup_requests") -@patch("rabbit_consumer.aq_api.ConsumerConfig") -def test_search_machine_by_serial_not_found(config, setup, vm_data): - """ - Test that search_machine_by_serial calls the correct URL with the correct parameters - """ - config.return_value.aq_url = "https://example.com" - setup.return_value = "" - response = search_machine_by_serial(vm_data) - - expected_url = "https://example.com/find/machine" - expected_args = {"serial": vm_data.virtual_machine_id} - setup.assert_called_once_with(expected_url, "get", mock.ANY, params=expected_args) - assert response is None - - -@patch("rabbit_consumer.aq_api.setup_requests") -@patch("rabbit_consumer.aq_api.ConsumerConfig") -def test_search_host_by_machine(config, setup): - """ - Test that search_host_by_machine calls the correct URL with the correct parameters - to return the host name - """ - config.return_value.aq_url = "https://example.com" - response = search_host_by_machine("machine_name") - - expected_url = "https://example.com/find/host" - expected_args = {"machine": "machine_name"} - setup.assert_called_once_with(expected_url, "get", mock.ANY, params=expected_args) - assert response == setup.return_value.strip.return_value - - -@patch("rabbit_consumer.aq_api.setup_requests") -@patch("rabbit_consumer.aq_api.ConsumerConfig") -def test_search_host_by_machine_not_found(config, setup): - """ - Test that search_host_by_machine calls the correct URL 
with the correct parameters - to return the host name - """ - config.return_value.aq_url = "https://example.com" - setup.return_value = "" - response = search_host_by_machine("machine_name") - - expected_url = "https://example.com/find/host" - expected_args = {"machine": "machine_name"} - setup.assert_called_once_with(expected_url, "get", mock.ANY, params=expected_args) - assert response is None diff --git a/OpenStack-Rabbit-Consumer/tests/test_aq_metadata.py b/OpenStack-Rabbit-Consumer/tests/test_aq_metadata.py deleted file mode 100644 index a3a8c662..00000000 --- a/OpenStack-Rabbit-Consumer/tests/test_aq_metadata.py +++ /dev/null @@ -1,109 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright (c) 2023 United Kingdom Research and Innovation -""" -Tests the AQ metadata dataclass, including -init from environment variables, and overriding values -""" - -from typing import Dict - -import pytest - -from rabbit_consumer.aq_metadata import AqMetadata - - -@pytest.fixture(name="image_metadata") -def fixture_image_metadata() -> Dict[str, str]: - """ - Creates a dictionary with mock data - which represents an example OpenStack image's metadata - """ - return { - "AQ_ARCHETYPE": "archetype_mock", - "AQ_DOMAIN": "domain_mock", - "AQ_PERSONALITY": "personality_mock", - "AQ_OS": "os_mock", - "AQ_OSVERSION": "osversion_mock", - } - - -def test_aq_metadata_from_initial_dict(image_metadata): - """ - Tests creating an AQ metadata object from an initial dictionary - """ - returned = AqMetadata.from_dict(image_metadata) - - assert returned.aq_archetype == "archetype_mock" - assert returned.aq_domain == "domain_mock" - assert returned.aq_personality == "personality_mock" - assert returned.aq_os == "os_mock" - assert returned.aq_os_version == "osversion_mock" - - -def test_aq_metadata_override_all(image_metadata): - """ - Tests overriding all values in an AQ metadata object - """ - returned = AqMetadata.from_dict(image_metadata) - returned.override_from_vm_meta( - { - "AQ_ARCHETYPE": "archetype_mock_override", - "AQ_DOMAIN": "domain_mock_override", - "AQ_PERSONALITY": "personality_mock_override", - } - ) - - assert returned.aq_archetype == "archetype_mock_override" - assert returned.aq_domain == "domain_mock_override" - assert returned.aq_personality == "personality_mock_override" - - # Check the original values are still there - assert returned.aq_os == "os_mock" - assert returned.aq_os_version == "osversion_mock" - - -def test_aq_metadata_override_with_none_values(image_metadata): - """ - Tests that any invalid values, such as none, null or - whitespace are all ignored when overriding from the image - layer - """ - returned = AqMetadata.from_dict(image_metadata) - returned.override_from_vm_meta( - { - "AQ_ARCHETYPE": "archetype_mock_override", - "AQ_DOMAIN": "None", - "AQ_PERSONALITY": "null", - "AQ_OS": "none", - "AQ_OSVERSION": " ", # Space intentionally left - "AQ_SANDBOX": None, - } - ) - - assert returned.aq_archetype == "archetype_mock_override" - - reference_metadata = AqMetadata.from_dict(image_metadata) - assert returned.aq_domain == reference_metadata.aq_domain - assert returned.aq_os == reference_metadata.aq_os - assert returned.aq_os_version == reference_metadata.aq_os_version - assert returned.aq_sandbox == reference_metadata.aq_sandbox - - -def test_aq_metadata_sandbox(image_metadata): - """ - Tests the sandbox value in an AQ metadata object - maps correctly onto the sandbox value - """ - returned = AqMetadata.from_dict(image_metadata) - returned.override_from_vm_meta( - { - 
"AQ_SANDBOX": "sandbox_mock", - } - ) - # This should be the only value that has changed - assert returned.aq_sandbox == "sandbox_mock" - - assert returned.aq_archetype == "archetype_mock" - assert returned.aq_personality == "personality_mock" - assert returned.aq_os == "os_mock" - assert returned.aq_os_version == "osversion_mock" diff --git a/OpenStack-Rabbit-Consumer/tests/test_consumer_config.py b/OpenStack-Rabbit-Consumer/tests/test_consumer_config.py deleted file mode 100644 index eb7e2c26..00000000 --- a/OpenStack-Rabbit-Consumer/tests/test_consumer_config.py +++ /dev/null @@ -1,40 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright (c) 2023 United Kingdom Research and Innovation -""" -Test the consumer config class, this handles the environment variables -that are used to configure the consumer. -""" -import pytest - -from rabbit_consumer.consumer_config import ConsumerConfig - -AQ_FIELDS = [ - ("aq_prefix", "AQ_PREFIX"), - ("aq_url", "AQ_URL"), -] - -OPENSTACK_FIELDS = [ - ("openstack_auth_url", "OPENSTACK_AUTH_URL"), - ("openstack_compute_url", "OPENSTACK_COMPUTE_URL"), - ("openstack_username", "OPENSTACK_USERNAME"), - ("openstack_password", "OPENSTACK_PASSWORD"), -] - -RABBIT_FIELDS = [ - ("rabbit_hosts", "RABBIT_HOST"), - ("rabbit_port", "RABBIT_PORT"), - ("rabbit_username", "RABBIT_USERNAME"), - ("rabbit_password", "RABBIT_PASSWORD"), -] - - -@pytest.mark.parametrize( - "config_name,env_var", AQ_FIELDS + OPENSTACK_FIELDS + RABBIT_FIELDS -) -def test_config_gets_os_env_vars(monkeypatch, config_name, env_var): - """ - Test that the config class pulls the correct values from the environment. - """ - expected = "MOCK_ENV" - monkeypatch.setenv(env_var, expected) - assert getattr(ConsumerConfig(), config_name) == expected diff --git a/OpenStack-Rabbit-Consumer/tests/test_message_consumer.py b/OpenStack-Rabbit-Consumer/tests/test_message_consumer.py deleted file mode 100644 index c305d716..00000000 --- a/OpenStack-Rabbit-Consumer/tests/test_message_consumer.py +++ /dev/null @@ -1,495 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright (c) 2023 United Kingdom Research and Innovation -""" -Tests the message consumption flow -for the consumer -""" -from unittest.mock import Mock, NonCallableMock, patch, call, MagicMock - -import pytest - -# noinspection PyUnresolvedReferences -from rabbit_consumer.consumer_config import ConsumerConfig -from rabbit_consumer.message_consumer import ( - on_message, - initiate_consumer, - add_aq_details_to_metadata, - handle_create_machine, - handle_machine_delete, - SUPPORTED_MESSAGE_TYPES, - check_machine_valid, - is_aq_managed_image, - get_aq_build_metadata, - delete_machine, - generate_login_str, -) -from rabbit_consumer.vm_data import VmData - - -@pytest.fixture(name="valid_event_type") -def fixture_valid_event_type(): - """ - Fixture for a valid event type - """ - mock = NonCallableMock() - mock.event_type = SUPPORTED_MESSAGE_TYPES["create"] - return mock - - -@patch("rabbit_consumer.message_consumer.consume") -@patch("rabbit_consumer.message_consumer.MessageEventType") -@patch("rabbit_consumer.message_consumer.RabbitMessage") -def test_on_message_parses_json( - message_parser, message_event_type, consume, valid_event_type -): - """ - Test that the function parses the message body as JSON - """ - message_event_type.from_json.return_value = valid_event_type - - with ( - patch("rabbit_consumer.message_consumer.json") as json, - patch("rabbit_consumer.message_consumer.is_aq_managed_image"), - ): - message = Mock() - on_message(message) - - 
decoded_body = json.loads.return_value - message_parser.from_json.assert_called_once_with(decoded_body["oslo.message"]) - consume.assert_called_once_with(message_parser.from_json.return_value) - message.ack.assert_called_once() - - -@patch("rabbit_consumer.message_consumer.consume") -@patch("rabbit_consumer.message_consumer.is_aq_managed_image") -@patch("rabbit_consumer.message_consumer.MessageEventType") -def test_on_message_ignores_wrong_message_type(message_event_type, is_managed, consume): - """ - Test that the function ignores messages with the wrong message type - """ - message_event = NonCallableMock() - message_event.event_type = "wrong" - message_event_type.from_json.return_value = message_event - - with patch("rabbit_consumer.message_consumer.json"): - message = Mock() - on_message(message) - - is_managed.assert_not_called() - consume.assert_not_called() - message.ack.assert_called_once() - - -@pytest.mark.parametrize("event_type", SUPPORTED_MESSAGE_TYPES.values()) -@patch("rabbit_consumer.message_consumer.consume") -@patch("rabbit_consumer.message_consumer.MessageEventType") -def test_on_message_accepts_event_types(message_event_type, consume, event_type): - """ - Test that the function accepts the correct event types - """ - message_event = NonCallableMock() - message_event.event_type = event_type - message_event_type.from_json.return_value = message_event - - with ( - patch("rabbit_consumer.message_consumer.RabbitMessage"), - patch("rabbit_consumer.message_consumer.json"), - ): - message = Mock() - on_message(message) - - consume.assert_called_once() - message.ack.assert_called_once() - - -@pytest.fixture(name="mocked_config") -def mocked_config_fixture() -> ConsumerConfig: - """ - Provides a mocked input config for the consumer - """ - config = ConsumerConfig() - - # Note: the mismatched spaces are intentional - config.rabbit_hosts = "rabbit_host1, rabbit_host2,rabbit_host3" - config.rabbit_port = 1234 - config.rabbit_username = "rabbit_username" - config.rabbit_password = "rabbit_password" - return config - - -def test_generate_login_str(mocked_config): - """ - Test that the function generates the correct login string - """ - expected = ( - "amqp://" - "rabbit_username:rabbit_password@rabbit_host1:1234," - "rabbit_username:rabbit_password@rabbit_host2:1234," - "rabbit_username:rabbit_password@rabbit_host3:1234" - ) - - assert generate_login_str(mocked_config) == expected - - -def test_generate_login_str_no_hosts(mocked_config): - """ - Test that the function raises if nothing is passed - """ - mocked_config.rabbit_hosts = "" - with pytest.raises(ValueError): - assert generate_login_str(mocked_config) - - -def test_generate_login_non_str(mocked_config): - """ - Test that the function raises if the input is not a string - """ - mocked_config.rabbit_hosts = 1234 - with pytest.raises(ValueError): - assert generate_login_str(mocked_config) - - -@patch("rabbit_consumer.message_consumer.logger") -def test_password_does_not_get_logged(logging, mocked_config): - """ - Test that the password does not get logged - """ - returned_str = generate_login_str(mocked_config) - logging.debug.assert_called_once() - logging_arg = logging.debug.call_args[0][1] - assert mocked_config.rabbit_password in returned_str - - # Check that the password is not in the log message - assert mocked_config.rabbit_username in logging_arg - assert mocked_config.rabbit_password not in logging_arg - - -@patch("rabbit_consumer.message_consumer.verify_kerberos_ticket") 
-@patch("rabbit_consumer.message_consumer.generate_login_str") -@patch("rabbit_consumer.message_consumer.rabbitpy") -def test_initiate_consumer_channel_setup(rabbitpy, gen_login, _, mocked_config): - """ - Test that the function sets up the channel and queue correctly - """ - with patch("rabbit_consumer.message_consumer.ConsumerConfig") as config: - config.return_value = mocked_config - initiate_consumer() - - gen_login.assert_called_once_with(mocked_config) - - rabbitpy.Connection.assert_called_once_with(gen_login.return_value) - connection = rabbitpy.Connection.return_value.__enter__.return_value - connection.channel.assert_called_once() - channel = connection.channel.return_value.__enter__.return_value - - rabbitpy.Queue.assert_called_once_with(channel, name="ral.info", durable=True) - queue = rabbitpy.Queue.return_value - queue.bind.assert_called_once_with("nova", routing_key="ral.info") - - -@patch("rabbit_consumer.message_consumer.verify_kerberos_ticket") -@patch("rabbit_consumer.message_consumer.on_message") -@patch("rabbit_consumer.message_consumer.rabbitpy") -def test_initiate_consumer_actual_consumption(rabbitpy, message_mock, _): - """ - Test that the function actually consumes messages - """ - queue_messages = [NonCallableMock(), NonCallableMock()] - # We need our mocked queue to act like a generator - rabbitpy.Queue.return_value.__iter__.return_value = queue_messages - - with patch("rabbit_consumer.message_consumer.generate_login_str"): - initiate_consumer() - - message_mock.assert_has_calls([call(message) for message in queue_messages]) - - -@patch("rabbit_consumer.message_consumer.openstack_api") -@patch("rabbit_consumer.message_consumer.aq_api") -def test_add_aq_details_to_metadata( - aq_api, openstack_api, vm_data, openstack_address_list -): - """ - Test that the function adds the hostname to the metadata when the machine exists - """ - openstack_api.check_machine_exists.return_value = True - add_aq_details_to_metadata(vm_data, openstack_address_list) - - hostnames = [i.hostname for i in openstack_address_list] - expected = { - "HOSTNAMES": ",".join(hostnames), - "AQ_STATUS": "SUCCESS", - "AQ_MACHINE": aq_api.search_machine_by_serial.return_value, - } - - openstack_api.check_machine_exists.assert_called_once_with(vm_data) - aq_api.search_machine_by_serial.assert_called_once_with(vm_data) - openstack_api.update_metadata.assert_called_with(vm_data, expected) - - -@patch("rabbit_consumer.message_consumer.openstack_api") -def test_add_hostname_to_metadata_machine_does_not_exist(openstack_api, vm_data): - """ - Test that the function does not add the hostname to the metadata when the machine does not exist - """ - openstack_api.check_machine_exists.return_value = False - add_aq_details_to_metadata(vm_data, []) - - openstack_api.check_machine_exists.assert_called_once_with(vm_data) - openstack_api.update_metadata.assert_not_called() - - -@patch("rabbit_consumer.message_consumer.check_machine_valid") -@patch("rabbit_consumer.message_consumer.openstack_api") -def test_handle_create_machine_skips_invalid(openstack_api, machine_valid): - """ - Test that the function skips invalid machines - """ - machine_valid.return_value = False - vm_data = Mock() - - handle_create_machine(vm_data) - - machine_valid.assert_called_once_with(vm_data) - openstack_api.get_server_networks.assert_not_called() - - -@patch("rabbit_consumer.message_consumer.openstack_api") -@patch("rabbit_consumer.message_consumer.aq_api") -@patch("rabbit_consumer.message_consumer.add_aq_details_to_metadata") -# pylint: 
disable=too-many-arguments -def test_consume_create_machine_hostnames_good_path( - metadata, aq_api, openstack, rabbit_message, image_metadata -): - """ - Test that the function calls the correct functions in the correct order to register a new machine - """ - with ( - patch("rabbit_consumer.message_consumer.VmData") as data_patch, - patch("rabbit_consumer.message_consumer.check_machine_valid") as check_machine, - patch( - "rabbit_consumer.message_consumer.get_aq_build_metadata" - ) as get_image_meta, - patch("rabbit_consumer.message_consumer.delete_machine") as delete_machine_mock, - ): - check_machine.return_value = True - get_image_meta.return_value = image_metadata - - handle_create_machine(rabbit_message) - - vm_data = data_patch.from_message.return_value - network_details = openstack.get_server_networks.return_value - - data_patch.from_message.assert_called_with(rabbit_message) - openstack.get_server_networks.assert_called_with(vm_data) - - # Check main Aq Flow - delete_machine_mock.assert_called_once_with(vm_data, network_details[0]) - aq_api.create_machine.assert_called_once_with(rabbit_message, vm_data) - machine_name = aq_api.create_machine.return_value - - # Networking - aq_api.add_machine_nics.assert_called_once_with(machine_name, network_details) - - aq_api.set_interface_bootable.assert_called_once_with(machine_name, "eth0") - - aq_api.create_host.assert_called_once_with( - image_metadata, network_details, machine_name - ) - aq_api.aq_make.assert_called_once_with(network_details) - - # Metadata - metadata.assert_called_once_with(vm_data, network_details) - - -@patch("rabbit_consumer.message_consumer.delete_machine") -def test_consume_delete_machine_good_path(delete_machine_mock, rabbit_message): - """ - Test that the function calls the correct functions in the correct order to delete a machine - """ - rabbit_message.payload.metadata.machine_name = "AQ-HOST1" - - with patch("rabbit_consumer.message_consumer.VmData") as data_patch: - handle_machine_delete(rabbit_message) - - delete_machine_mock.assert_called_once_with( - vm_data=data_patch.from_message.return_value - ) - - -@patch("rabbit_consumer.message_consumer.is_aq_managed_image") -@patch("rabbit_consumer.message_consumer.openstack_api") -def test_check_machine_valid(openstack_api, is_aq_managed): - """ - Test that the function returns True when the machine is valid - """ - mock_message = NonCallableMock() - is_aq_managed.return_value = True - - vm_data = VmData.from_message(mock_message) - - openstack_api.check_machine_exists.return_value = True - - assert check_machine_valid(mock_message) - is_aq_managed.assert_called_once_with(vm_data) - openstack_api.check_machine_exists.assert_called_once_with(vm_data) - - -@patch("rabbit_consumer.message_consumer.is_aq_managed_image") -@patch("rabbit_consumer.message_consumer.openstack_api") -def test_check_machine_invalid_image(openstack_api, is_aq_managed): - """ - Test that the function returns False when the image is not AQ managed - """ - mock_message = NonCallableMock() - is_aq_managed.return_value = False - openstack_api.check_machine_exists.return_value = True - vm_data = VmData.from_message(mock_message) - - assert not check_machine_valid(mock_message) - - openstack_api.check_machine_exists.assert_called_once_with(vm_data) - is_aq_managed.assert_called_once_with(vm_data) - - -@patch("rabbit_consumer.message_consumer.is_aq_managed_image") -@patch("rabbit_consumer.message_consumer.openstack_api") -def test_check_machine_invalid_machine(openstack_api, is_aq_managed): - """ - 
Test that the function returns False when the machine does not exist - """ - mock_message = NonCallableMock() - openstack_api.check_machine_exists.return_value = False - - assert not check_machine_valid(mock_message) - - is_aq_managed.assert_not_called() - openstack_api.check_machine_exists.assert_called_once_with( - VmData.from_message(mock_message) - ) - - -@patch("rabbit_consumer.message_consumer.openstack_api") -def test_is_aq_managed_image(openstack_api, vm_data): - """ - Test that the function returns True when the image is AQ managed - """ - openstack_api.get_image.return_value.metadata = {"AQ_OS": "True"} - - assert is_aq_managed_image(vm_data) - openstack_api.get_image.assert_called_once_with(vm_data) - - -@patch("rabbit_consumer.message_consumer.openstack_api") -def test_is_aq_managed_image_missing_image(openstack_api, vm_data): - """ - Test that the function returns False when the image is not AQ managed - """ - openstack_api.get_image.return_value = None - - assert not is_aq_managed_image(vm_data) - openstack_api.get_image.assert_called_once_with(vm_data) - - -@patch("rabbit_consumer.message_consumer.VmData") -@patch("rabbit_consumer.message_consumer.openstack_api") -def test_is_aq_managed_image_missing_key(openstack_api, vm_data): - """ - Test that the function returns False when the image is not AQ managed - """ - openstack_api.get_image.return_value.metadata = {} - - assert not is_aq_managed_image(vm_data) - openstack_api.get_image.assert_called_once_with(vm_data) - - -@patch("rabbit_consumer.message_consumer.AqMetadata") -@patch("rabbit_consumer.message_consumer.openstack_api") -def test_get_aq_build_metadata(openstack_api, aq_metadata_class, vm_data): - """ - Test that the function returns the correct metadata - """ - aq_metadata_obj: MagicMock = get_aq_build_metadata(vm_data) - - # We should first construct from an image - assert aq_metadata_obj == aq_metadata_class.from_dict.return_value - aq_metadata_class.from_dict.assert_called_once_with( - openstack_api.get_image.return_value.metadata - ) - - # Then override with an object - openstack_api.get_server_metadata.assert_called_once_with(vm_data) - aq_metadata_obj.override_from_vm_meta.assert_called_once_with( - openstack_api.get_server_metadata.return_value - ) - - -@patch("rabbit_consumer.message_consumer.aq_api") -def test_delete_machine_hostname_only(aq_api, vm_data, openstack_address): - """ - Tests that the function deletes a host then exits if no machine is found - """ - aq_api.check_host_exists.return_value = True - aq_api.search_machine_by_serial.return_value = None - - delete_machine(vm_data, openstack_address) - aq_api.delete_host.assert_called_once_with(openstack_address.hostname) - aq_api.delete_machine.assert_not_called() - - -@patch("rabbit_consumer.message_consumer.aq_api") -def test_delete_machine_by_serial(aq_api, vm_data, openstack_address): - """ - Tests that the function deletes a host then a machine - assuming both were found - """ - # Assume our host address doesn't match the machine record - # but the machine does have a hostname which is valid... 
- aq_api.check_host_exists.side_effect = [False, True] - - aq_api.search_host_by_machine.return_value = "host.example.com" - aq_api.get_machine_details.return_value = "" - - delete_machine(vm_data, openstack_address) - - aq_api.check_host_exists.assert_has_calls( - [call(openstack_address.hostname), call("host.example.com")] - ) - aq_api.delete_host.assert_called_once_with("host.example.com") - - -@patch("rabbit_consumer.message_consumer.aq_api") -@patch("rabbit_consumer.message_consumer.socket") -def test_delete_machine_no_hostname(socket_api, aq_api, vm_data): - """ - Tests - """ - aq_api.check_host_exists.return_value = False - - ip_address = "127.0.0.1" - socket_api.gethostbyname.return_value = ip_address - - machine_name = aq_api.search_machine_by_serial.return_value - aq_api.get_machine_details.return_value = f"eth0: {ip_address}" - - delete_machine(vm_data, NonCallableMock()) - aq_api.delete_address.assert_called_once_with(ip_address, machine_name) - aq_api.delete_interface.assert_called_once_with(machine_name) - - -@patch("rabbit_consumer.message_consumer.aq_api") -@patch("rabbit_consumer.message_consumer.socket") -def test_delete_machine_always_called(socket_api, aq_api, vm_data): - """ - Tests that the function always calls the delete machine function - """ - aq_api.check_host_exists.return_value = False - socket_api.gethostbyname.return_value = "123123" - - aq_api.get_machine_details.return_value = "Machine Details" - - machine_name = "machine_name" - aq_api.search_machine_by_serial.return_value = machine_name - - delete_machine(vm_data, NonCallableMock()) - aq_api.delete_machine.assert_called_once_with(machine_name) diff --git a/OpenStack-Rabbit-Consumer/tests/test_openstack_address.py b/OpenStack-Rabbit-Consumer/tests/test_openstack_address.py deleted file mode 100644 index 631ee1ec..00000000 --- a/OpenStack-Rabbit-Consumer/tests/test_openstack_address.py +++ /dev/null @@ -1,161 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright (c) 2023 United Kingdom Research and Innovation -""" -Tests the dataclass representing OpenStack network addresses -""" -import copy -from unittest.mock import patch - -import pytest - -from rabbit_consumer.openstack_address import OpenstackAddress - - -@pytest.fixture(name="example_dict_internal") -def fixture_example_dict_internal(): - """ - Creates a dictionary with mock data representing the network addresses of an internal VM - """ - # Adapted from real response from OpenStack API - return { - "Internal": [ - { - "OS-EXT-IPS-MAC:mac_addr": "fa:ca:aa:aa:aa:aa", - "version": 4, - "addr": "127.0.0.63", - "OS-EXT-IPS:type": "fixed", - } - ] - } - - -@pytest.fixture(name="example_dict_two_entries_internal") -def fixture_example_dict_two_entries_internal(example_dict_internal): - """ - Creates a dictionary with mock data representing the network addresses of an internal VM with two entries - """ - second = copy.deepcopy(example_dict_internal["Internal"][0]) - second["addr"] = "127.0.0.64" - example_dict_internal["Internal"].append(second) - return example_dict_internal - - -@patch("rabbit_consumer.openstack_address.socket.gethostbyaddr") -def test_openstack_address_single_case_internal(mock_socket, example_dict_internal): - """ - Tests the OpenstackAddress class with a single internal network address - """ - result = OpenstackAddress.get_internal_networks(example_dict_internal) - assert len(result) == 1 - assert result[0].version == 4 - assert result[0].addr == "127.0.0.63" - assert result[0].mac_addr == "fa:ca:aa:aa:aa:aa" - 
mock_socket.assert_called_once() - - -@patch("rabbit_consumer.openstack_address.socket.gethostbyaddr") -def test_openstack_address_multiple_networks_internal( - mock_socket, example_dict_two_entries_internal -): - """ - Tests the OpenstackAddress class with multiple internal network addresses - """ - result = OpenstackAddress.get_internal_networks(example_dict_two_entries_internal) - assert len(result) == 2 - assert result[0].version == 4 - assert result[0].addr == "127.0.0.63" - assert result[1].addr == "127.0.0.64" - mock_socket.assert_called() - - -@patch("rabbit_consumer.openstack_address.socket.gethostbyaddr") -def test_openstack_address_populate_internal( - mock_socket, example_dict_two_entries_internal -): - """ - Tests the OpenstackAddress class with multiple internal network addresses - """ - mock_socket.side_effect = [("hostname", None, None), ("hostname2", None, None)] - result = OpenstackAddress.get_internal_networks(example_dict_two_entries_internal) - - assert result[0].hostname == "hostname" - assert result[1].hostname == "hostname2" - - assert mock_socket.call_count == 2 - assert mock_socket.call_args_list[0][0][0] == "127.0.0.63" - assert mock_socket.call_args_list[1][0][0] == "127.0.0.64" - - -@pytest.fixture(name="example_dict_services") -def fixture_example_dict_services(): - """ - Creates a dictionary with mock data representing the services network addresses of a VM - """ - # Adapted from real response from OpenStack API - return { - "Services": [ - { - "OS-EXT-IPS-MAC:mac_addr": "fa:ca:aa:aa:aa:aa", - "version": 4, - "addr": "127.0.0.63", - "OS-EXT-IPS:type": "fixed", - } - ] - } - - -@pytest.fixture(name="example_dict_two_entries_services") -def fixture_example_dict_two_entries_services(example_dict_services): - """ - Creates a dictionary with mock data representing the services network addresses of a VM with two entries - """ - second = copy.deepcopy(example_dict_services["Services"][0]) - second["addr"] = "127.0.0.64" - example_dict_services["Services"].append(second) - return example_dict_services - - -@patch("rabbit_consumer.openstack_address.socket.gethostbyaddr") -def test_openstack_address_single_case_services(mock_socket, example_dict_services): - """ - Tests the OpenstackAddress class with a single services network address - """ - result = OpenstackAddress.get_services_networks(example_dict_services) - assert len(result) == 1 - assert result[0].version == 4 - assert result[0].addr == "127.0.0.63" - assert result[0].mac_addr == "fa:ca:aa:aa:aa:aa" - mock_socket.assert_called_once() - - -@patch("rabbit_consumer.openstack_address.socket.gethostbyaddr") -def test_openstack_address_multiple_networks_services( - mock_socket, example_dict_two_entries_services -): - """ - Tests the OpenstackAddress class with multiple services network addresses - """ - result = OpenstackAddress.get_services_networks(example_dict_two_entries_services) - assert len(result) == 2 - assert result[0].version == 4 - assert result[0].addr == "127.0.0.63" - assert result[1].addr == "127.0.0.64" - mock_socket.assert_called() - - -@patch("rabbit_consumer.openstack_address.socket.gethostbyaddr") -def test_openstack_address_populate_services( - mock_socket, example_dict_two_entries_services -): - """ - Tests the OpenstackAddress class with services multiple network addresses - """ - mock_socket.side_effect = [("hostname", None, None), ("hostname2", None, None)] - result = OpenstackAddress.get_services_networks(example_dict_two_entries_services) - - assert result[0].hostname == "hostname" - 
assert result[1].hostname == "hostname2" - - assert mock_socket.call_count == 2 - assert mock_socket.call_args_list[0][0][0] == "127.0.0.63" - assert mock_socket.call_args_list[1][0][0] == "127.0.0.64" diff --git a/OpenStack-Rabbit-Consumer/tests/test_openstack_api.py b/OpenStack-Rabbit-Consumer/tests/test_openstack_api.py deleted file mode 100644 index 0a20b478..00000000 --- a/OpenStack-Rabbit-Consumer/tests/test_openstack_api.py +++ /dev/null @@ -1,161 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright (c) 2023 United Kingdom Research and Innovation -""" -Tests that the Openstack API functions are invoked -as expected with the correct params -""" -from unittest.mock import NonCallableMock, patch - -# noinspection PyUnresolvedReferences -from rabbit_consumer.openstack_api import ( - update_metadata, - OpenstackConnection, - check_machine_exists, - get_server_details, - get_server_networks, - get_image, -) - - -@patch("rabbit_consumer.openstack_api.ConsumerConfig") -@patch("rabbit_consumer.openstack_api.openstack.connect") -def test_openstack_connection(mock_connect, mock_config): - """ - Test that the OpenstackConnection context manager calls the correct functions - """ - with OpenstackConnection() as conn: - mock_connect.assert_called_once_with( - auth_url=mock_config.return_value.openstack_auth_url, - username=mock_config.return_value.openstack_username, - password=mock_config.return_value.openstack_password, - project_name="admin", - user_domain_name="Default", - project_domain_name="default", - ) - - # Pylint is unable to see that openstack.connect returns a mock - # pylint: disable=no-member - assert conn == mock_connect.return_value - # pylint: disable=no-member - assert conn.close.call_count == 0 - - # Check close is called when the context manager exits - # pylint: disable=no-member - assert conn.close.call_count == 1 - - -@patch("rabbit_consumer.openstack_api.OpenstackConnection") -def test_check_machine_exists_existing_machine(conn, vm_data): - """ - Test that the function returns True when the machine exists - """ - context = conn.return_value.__enter__.return_value - context.compute.find_server.return_value = NonCallableMock() - found = check_machine_exists(vm_data) - - conn.assert_called_once_with() - context.compute.find_server.assert_called_with(vm_data.virtual_machine_id) - assert isinstance(found, bool) and found - - -@patch("rabbit_consumer.openstack_api.OpenstackConnection") -def test_check_machine_exists_deleted_machine(conn, vm_data): - """ - Test that the function returns False when the machine does not exist - """ - context = conn.return_value.__enter__.return_value - context.compute.find_server.return_value = None - found = check_machine_exists(vm_data) - - conn.assert_called_once_with() - context = conn.return_value.__enter__.return_value - context.compute.find_server.assert_called_with(vm_data.virtual_machine_id) - assert isinstance(found, bool) and not found - - -@patch("rabbit_consumer.openstack_api.OpenstackConnection") -@patch("rabbit_consumer.openstack_api.get_server_details") -def test_update_metadata(server_details, conn, vm_data): - """ - Test that the function calls the correct functions to update the metadata on a VM - """ - server_details.return_value = NonCallableMock() - update_metadata(vm_data, {"key": "value"}) - - server_details.assert_called_once_with(vm_data) - - conn.assert_called_once_with() - context = conn.return_value.__enter__.return_value - context.compute.set_server_metadata.assert_called_once_with( - 
server_details.return_value, **{"key": "value"} - ) - - -@patch("rabbit_consumer.openstack_api.OpenstackConnection") -def test_get_server_details(conn, vm_data): - """ - Test that the function calls the correct functions to get the details of a VM - """ - context = conn.return_value.__enter__.return_value - context.compute.servers.return_value = [NonCallableMock()] - - result = get_server_details(vm_data) - - context.compute.servers.assert_called_once_with( - uuid=vm_data.virtual_machine_id, all_projects=True - ) - - assert result == context.compute.servers.return_value[0] - - -@patch("rabbit_consumer.openstack_api.get_server_details") -@patch("rabbit_consumer.openstack_api.OpenstackAddress") -def test_get_server_networks_internal(address, server_details, vm_data): - """ - Test that the function calls the correct functions to get the networks of a VM - """ - server_details.return_value.addresses = {"Internal": []} - - get_server_networks(vm_data) - address.get_internal_networks.assert_called_once_with( - server_details.return_value.addresses - ) - - -@patch("rabbit_consumer.openstack_api.get_server_details") -@patch("rabbit_consumer.openstack_api.OpenstackAddress") -def test_get_server_networks_services(address, server_details, vm_data): - """ - Test that the function calls the correct functions to get the networks of a VM - """ - server_details.return_value.addresses = {"Services": []} - - get_server_networks(vm_data) - address.get_services_networks.assert_called_once_with( - server_details.return_value.addresses - ) - - -@patch("rabbit_consumer.openstack_api.get_server_details") -def test_get_server_networks_no_network(server_details, vm_data): - """ - Tests that an empty list is returned when there are no networks - """ - server_details.return_value = NonCallableMock() - server_details.return_value.addresses = {} - - result = get_server_networks(vm_data) - assert not result - - -@patch("rabbit_consumer.openstack_api.get_server_details") -def test_get_image_no_image_id(server_details, vm_data): - """ - Tests that get image handles an empty image UUID - usually when a volume was used instead of an image - """ - server_details.return_value = NonCallableMock() - server_details.return_value.image.id = None - - result = get_image(vm_data) - assert not result diff --git a/OpenStack-Rabbit-Consumer/tests/test_rabbit_message.py b/OpenStack-Rabbit-Consumer/tests/test_rabbit_message.py deleted file mode 100644 index 15a060a3..00000000 --- a/OpenStack-Rabbit-Consumer/tests/test_rabbit_message.py +++ /dev/null @@ -1,76 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright (c) 2023 United Kingdom Research and Innovation -""" -Tests rabbit messages are consumed correctly from the queue -""" -import json -from typing import Dict - -import pytest - -from rabbit_consumer.rabbit_message import RabbitMessage - - -def _example_dict(with_metadata: bool) -> Dict: - """ - Returns an example dictionary for testing, based on real data from the RabbitMQ queue - """ - example_dict = { - "event_type": "compute.instance.create.end", - "_context_project_name": "project_name", - "_context_project_id": "project_id", - "_context_user_name": "user_name", - "payload": { - "instance_id": "instance_id", - "display_name": "vm_name", - "vcpus": 1, - "memory_mb": 1024, - "host": "vm_host", - "metadata": {}, - }, - } - - if with_metadata: - example_dict["payload"]["metadata"] = {"AQ_MACHINENAME": "machine_name"} - - return example_dict - - -@pytest.fixture(name="example_json") -def fixture_example_json(): - """ - Returns 
an example JSON string for testing, based on real data from the RabbitMQ queue - """ - return json.dumps(_example_dict(with_metadata=False)) - - -@pytest.fixture(name="example_json_with_metadata") -def fixture_example_json_with_metadata(): - """ - Returns an example JSON string for testing, with metadata included - """ - return json.dumps(_example_dict(with_metadata=True)) - - -def test_rabbit_json_load(example_json): - """ - Tests that RabbitMessage.from_json() can load a JSON string and deserialise it into dataclasses - """ - deserialized = RabbitMessage.from_json(example_json) - assert deserialized.event_type == "compute.instance.create.end" - assert deserialized.project_name == "project_name" - assert deserialized.project_id == "project_id" - assert deserialized.user_name == "user_name" - assert deserialized.payload.instance_id == "instance_id" - assert deserialized.payload.vm_name == "vm_name" - assert deserialized.payload.vcpus == 1 - assert deserialized.payload.memory_mb == 1024 - assert deserialized.payload.vm_host == "vm_host" - - -def test_with_metadata(example_json_with_metadata): - """ - Tests that RabbitMessage.from_json() can load a JSON string and deserialise it into dataclasses - """ - deserialized = RabbitMessage.from_json(example_json_with_metadata) - assert deserialized.payload.metadata.machine_name == "machine_name" diff --git a/OpenStack-Rabbit-Consumer/version.txt b/OpenStack-Rabbit-Consumer/version.txt deleted file mode 100644 index fd2a0186..00000000 --- a/OpenStack-Rabbit-Consumer/version.txt +++ /dev/null @@ -1 +0,0 @@ -3.1.0 diff --git a/charts/rabbit-consumer/.helmignore b/charts/rabbit-consumer/.helmignore deleted file mode 100644 index 0e8a0eb3..00000000 --- a/charts/rabbit-consumer/.helmignore +++ /dev/null @@ -1,23 +0,0 @@ -# Patterns to ignore when building packages. -# This supports shell glob matching, relative path matching, and -# negation (prefixed with !). Only one pattern per line. -.DS_Store -# Common VCS dirs -.git/ -.gitignore -.bzr/ -.bzrignore -.hg/ -.hgignore -.svn/ -# Common backup files -*.swp -*.bak -*.tmp -*.orig -*~ -# Various IDEs -.project -.idea/ -*.tmproj -.vscode/ diff --git a/charts/rabbit-consumer/Chart.yaml b/charts/rabbit-consumer/Chart.yaml deleted file mode 100644 index 327b042c..00000000 --- a/charts/rabbit-consumer/Chart.yaml +++ /dev/null @@ -1,15 +0,0 @@ -apiVersion: v2 -name: rabbit-consumer-chart -description: A Helm chart for Rabbit Consumers -type: application - -# This is the chart version. This version number should be incremented each time you make changes -# to the chart and its templates, including the app version. -# Versions are expected to follow Semantic Versioning (https://semver.org/) -version: 1.8.0 - -# This is the version number of the application being deployed. This version number should be -# incremented each time you make changes to the application. Versions are not expected to -# follow Semantic Versioning. They should reflect the version the application is using. -# It is recommended to use it with quotes. 
-appVersion: "v3.1.0" diff --git a/charts/rabbit-consumer/dev-values.yaml b/charts/rabbit-consumer/dev-values.yaml deleted file mode 100644 index 4e051994..00000000 --- a/charts/rabbit-consumer/dev-values.yaml +++ /dev/null @@ -1,21 +0,0 @@ -consumer: - logLevel: DEBUG - image: - repository: rabbit-consumer - tag: local - # This allows us to build the image locally and use it in the chart - pullPolicy: Never - - aquilon: - defaultPrefix: vm-openstack-Dev- - - rabbitmq: - host: dev-openstack.stfc.ac.uk - - openstack: - authUrl: https://dev-openstack.stfc.ac.uk:5000/v3 - computeUrl: https://dev-openstack.stfc.ac.uk:8774/v2.1 - projectId: c9aee696c4b54f12a645af2c951327dc - -kerberosSidecar: - principle: "HTTP/dev-service1.nubes.rl.ac.uk" diff --git a/charts/rabbit-consumer/include/aquilon-gridpp-rl-ac-uk-chain.pem b/charts/rabbit-consumer/include/aquilon-gridpp-rl-ac-uk-chain.pem deleted file mode 100644 index cf2198ba..00000000 --- a/charts/rabbit-consumer/include/aquilon-gridpp-rl-ac-uk-chain.pem +++ /dev/null @@ -1,70 +0,0 @@ ------BEGIN CERTIFICATE----- -MIIEczCCA1ugAwIBAgIDAPXoMA0GCSqGSIb3DQEBCwUAMFMxCzAJBgNVBAYTAlVL -MRMwEQYDVQQKEwplU2NpZW5jZUNBMRIwEAYDVQQLEwlBdXRob3JpdHkxGzAZBgNV -BAMTElVLIGUtU2NpZW5jZSBDQSAyQjAeFw0yMjA3MTgxNTMxMDNaFw0yMzA4MTcx -NTMxMDNaMF8xCzAJBgNVBAYTAlVLMREwDwYDVQQKDAhlU2NpZW5jZTENMAsGA1UE -CwwEQ0xSQzEMMAoGA1UEBwwDUkFMMSAwHgYDVQQDDBdhcXVpbG9uLmdyaWRwcC5y -bC5hYy51azCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALBEuJy4ORc4 -E1r+QHl7QUmWnhV2Cej4KNdp9OnEL6pS27DlbY7LsUL8d/WFaco6VEZfRMWibuiK -sEcLNm9c1IR/feJKyHjuUhavLUue736Q6PflVf4LWeFckVN6J0K1Mn3hsWvXeYuf -afPgFPX8XGp8W0cfsjosNgAdQTwjVYq6GdolymJZGQa8kRswYA2+g1T+dFEYd0jk -+i7Z3KXNYjaEPcuHWjU+hfwHOtg5Y3PaLS/zadgSoMUdrO4LTIAegvRuUP3LSoCs -vAO30f4PekTT/4BtYyOvzZaemw0M98yejPlmGtNk5H7bHr4TUuHxtMZMjS/JwPya -6mjhNFvmH6cCAwEAAaOCAUIwggE+MAwGA1UdEwEB/wQCMAAwDgYDVR0PAQH/BAQD -AgSwMB0GA1UdJQQWMBQGCCsGAQUFBwMCBggrBgEFBQcDATAdBgNVHQ4EFgQUe8fL -ki5np98m9kXDiD3RQbp5WZQwHwYDVR0jBBgwFoAUEqW/kZ9/4q9qXAny4vpZ4Dbh -81UwIgYDVR0RBBswGYIXYXF1aWxvbi5ncmlkcHAucmwuYWMudWswJQYDVR0SBB4w -HIEac3VwcG9ydEBncmlkLXN1cHBvcnQuYWMudWswNwYDVR0gBDAwLjAPBg0rBgEE -AdkvAQEBAgIAMAwGCiqGSIb3TAUCAgEwDQYLKoZIhvdMBQIDAwIwOwYDVR0fBDQw -MjAwoC6gLIYqaHR0cDovL2NybC5jYS5uZ3MuYWMudWsvY3JsL2VzY2llbmNlMmIu -Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQAw7bt8O9auebBkqPFEWpKvl/bbNApmB4WW -QV76JMzDhFOX1WhKMDycesEIwln8q63JXLMX7hBQ/cDL3/0LxknmK3xojkBCrmoU -yn1mWm3wJ5twdO6HasKbvnkPp6pLzf1DVrj9/NuaWRTlftN0NViTK+kMAUGLxMk5 -28lNtBEWR2+Vs0w4s0YPIVGAkb+BZ1KE6sgCaKVffi+/rhpT/VIaT/R0blcOsrZ0 -idABWPJ3KztAZZ8cJX3uNeZrBAnx563jxk7dP/CoJYhYxkoWxeSuiKSWIdtIqgW2 -nKmOUEnelW7LWiU9IMDpY4dwaJ4GJJqfNMvnGVivTlFyLbjf+c6W ------END CERTIFICATE----- ------BEGIN CERTIFICATE----- -MIIDwzCCAqugAwIBAgICASMwDQYJKoZIhvcNAQELBQAwVDELMAkGA1UEBhMCVUsx -FTATBgNVBAoTDGVTY2llbmNlUm9vdDESMBAGA1UECxMJQXV0aG9yaXR5MRowGAYD -VQQDExFVSyBlLVNjaWVuY2UgUm9vdDAeFw0xMTA2MTgxMzAwMDBaFw0yNzEwMzAw -OTAwMDBaMFMxCzAJBgNVBAYTAlVLMRMwEQYDVQQKEwplU2NpZW5jZUNBMRIwEAYD -VQQLEwlBdXRob3JpdHkxGzAZBgNVBAMTElVLIGUtU2NpZW5jZSBDQSAyQjCCASIw -DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKkLgb2eIcly4LZfj0Rf5F7s+HE/ -6Tvpf4jsKkm7qs33y3EEudCbcPwQKjS2MgytPv+8xpEPHqy/hqTseNlZ6oJgc+V8 -xlJ+0iws882Ca8a9ZJ/iGQH9UzXU4q35ArN3cbwoWAAvMvzZ6hUV86fAAQ1AueQN -6h7/tnfYfaUMiB4PNxucmouMHDJGmYzl47FtlLeHUr2c4m/oWSG5pADIvGFpWFHj -NIw8/x4n97w5/ks0tc/8/5Q6xzUfCX/VfqciQCvKcui2J5MBhUlBDLenzwqvUytB -4XAwX/pRcKmnFEYwoc9OKGExNx9tn9RjQYJAC/KLb44Jqno9l0eRxu3uw4sCAwEA -AaOBnzCBnDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4E 
-FgQUEqW/kZ9/4q9qXAny4vpZ4Dbh81UwHwYDVR0jBBgwFoAUXvgbSKZ3ayk8LgBT -Mytjont+k8AwOQYDVR0fBDIwMDAuoCygKoYoaHR0cDovL2NybC5jYS5uZ3MuYWMu -dWsvY3JsL3Jvb3QtY3JsLmRlcjANBgkqhkiG9w0BAQsFAAOCAQEArd5TFOo9SzGW -0+KrAdzzf60zh4Wy//vZz4tgt7NeDbNpz2TZROBAClSu7oLPiruzgnhNP/Vxeu0s -pI41wRQsh0DVxhM+9ZFOskH+OdmHzKagoejvHh6Jt8WNN0eBLzN8Bvsue7ImJPaY -cf/Qj1ZTBhaRHcMsLNnqak3un/P+uLPxqSuxVKMtC8es/jqosS4czJ3dgs1hgFy9 -nPQiwuIyf3OJ9eifAOGXk9Nlpha9C54zhc+hAkSLnpx/FhPjwLgpwDRgDJud6otH -15x3qZqXNx7xbYfeHaM1R1HMEjfVdzKCTY4zsqNEGPEF/0nUQSFk6KQVz0/ugNmI -9qoDx3FeEg== ------END CERTIFICATE----- ------BEGIN CERTIFICATE----- -MIIDhjCCAm6gAwIBAgIBADANBgkqhkiG9w0BAQUFADBUMQswCQYDVQQGEwJVSzEV -MBMGA1UEChMMZVNjaWVuY2VSb290MRIwEAYDVQQLEwlBdXRob3JpdHkxGjAYBgNV -BAMTEVVLIGUtU2NpZW5jZSBSb290MB4XDTA3MTAzMDA5MDAwMFoXDTI3MTAzMDA5 -MDAwMFowVDELMAkGA1UEBhMCVUsxFTATBgNVBAoTDGVTY2llbmNlUm9vdDESMBAG -A1UECxMJQXV0aG9yaXR5MRowGAYDVQQDExFVSyBlLVNjaWVuY2UgUm9vdDCCASIw -DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAM3ORtmmUHotwDTfAH5/eIlo3+BK -oElDeaeN5Sg2lhPu0laPch7pHKSzlqyHmZGsk3fZb8hBmO0lD49+dKnA31zLU6ko -Bje1THqdrGZPcjTm0lhc/SjzsBtWm4oC/bpYBACliB9wa3eSuU4Rqq71n7+4J+WO -KvaDHvaTdRYE3pyie2Xe5QTI8CXedCMh18+EdFvwlV79dlmNRNY93ZWUu6POL6d+ -LapQkUmasXLjyjNzcoPXgDyGauHOqmyqxuPx4tDTsC25nKr+7K5k3T+lplJ/jMkQ -l/QHgqnABBXQILzzrt0a8nQdM8ONA+bht+8sy4eN/0zMulNj8kAzrutkhJsCAwEA -AaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE -FF74G0imd2spPC4AUzMrY6J7fpPAMB8GA1UdIwQYMBaAFF74G0imd2spPC4AUzMr -Y6J7fpPAMA0GCSqGSIb3DQEBBQUAA4IBAQCT0a0kcE8oVYzjTGrd5ayvOI+vbdiY -MG7/2V2cILKIts7DNdIrEIonlV0Cw96pQShjRRIizSHG5eH1kLJcbK/DpgX6QuPR -WhWR5wDJ4vaz0qTmUpwEpsT9mmyehhHbio/EsYM7LesScJrO2piD2Bf6pFUMR1LC -scAqN7fTXJSg6Mj6tOhpWpPwM9WSwQn8sDTgL0KkrjVOVaeJwlyNyEfUpJuFIgTl -rEnkXqhWQ6ozArDonB4VHlew6eqIGaxWB/yWMNvY5K+b1j5fdcMelzA45bFucOf1 -Ag+odBgsGZahpFgOqKvBuvSrk/8+ie8I2CVYwT486pPnb5JFgHgUfZo8 ------END CERTIFICATE----- diff --git a/charts/rabbit-consumer/include/kinit.sh b/charts/rabbit-consumer/include/kinit.sh deleted file mode 100644 index 5299e00f..00000000 --- a/charts/rabbit-consumer/include/kinit.sh +++ /dev/null @@ -1,21 +0,0 @@ -#!/bin/bash -# SPDX-License-Identifier: Apache-2.0 -# Copyright (c) 2023 United Kingdom Research and Innovation - -# Adapted from https://cloud.redhat.com/blog/kerberos-sidecar-container - -set -ex - -while true - -do -echo "kinit at $(date --universal)" - -kinit -V -k $KRB5_PRINCIPLE -# Check that the ticket is valid and echo the ticket -klist -c /shared/krb5cc -s && klist -c /shared/krb5cc - -echo "$(date --universal): Waiting for $PERIOD_SECONDS seconds..." 
-sleep $PERIOD_SECONDS - -done \ No newline at end of file diff --git a/charts/rabbit-consumer/include/krb5.conf b/charts/rabbit-consumer/include/krb5.conf deleted file mode 100644 index f7087e5e..00000000 --- a/charts/rabbit-consumer/include/krb5.conf +++ /dev/null @@ -1,41 +0,0 @@ -[logging] - default = FILE:/var/log/krb5libs.log - kdc = FILE:/var/log/krb5kdc.log - admin_server = FILE:/var/log/kadmind.log - -[libdefaults] - ticket_lifetime = 24000 - default_realm = FED.CCLRC.AC.UK - -[domain_realm] - .grid-support.ac.uk = FED.CCLRC.AC.UK - .rl.ac.uk = FED.CCLRC.AC.UK - .dl.ac.uk = FED.CCLRC.AC.UK - .stfc.ac.uk = FED.CCLRC.AC.UK - .cclrc.ac.uk = FED.CCLRC.AC.UK - .clrc.ac.uk = FED.CCLRC.AC.UK - - grid-support.ac.uk = FED.CCLRC.AC.UK - rl.ac.uk = FED.CCLRC.AC.UK - dl.ac.uk = FED.CCLRC.AC.UK - stfc.ac.uk = FED.CCLRC.AC.UK - cclrc.ac.uk = FED.CCLRC.AC.UK - clrc.ac.uk = FED.CCLRC.AC.UK - -[realms] - FED.CCLRC.AC.UK = { - kdc = FED.CCLRC.AC.UK - admin_server = FED.CCLRC.AC.UK - default_domain = RL.AC.UK - } - RL.AC.UK = { - kdc = FED.CCLRC.AC.UK - admin_server = FED.CCLRC.AC.UK - } - -[pam] - debug = false - ticket_lifetime = 36000 - renew_lifetime = 36000 - forwardable = true - krb4_convert = false diff --git a/charts/rabbit-consumer/include/sidecar-entrypoint.sh b/charts/rabbit-consumer/include/sidecar-entrypoint.sh deleted file mode 100644 index c94c8a69..00000000 --- a/charts/rabbit-consumer/include/sidecar-entrypoint.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash -# SPDX-License-Identifier: Apache-2.0 -# Copyright (c) 2023 United Kingdom Research and Innovation - -set -ex - -dnf install -y krb5-workstation - -echo "Principle set to: $KRB5_PRINCIPLE" - -useradd -r krb5user -# Run kinit.sh as system user to reduce risk of privilege escalation -su --preserve-environment -c '/etc/entrypoints.d/kinit.sh' krb5user - -# We should never get here -echo "ERROR: Exited while loop" -exit 1 diff --git a/charts/rabbit-consumer/prod-values.yaml b/charts/rabbit-consumer/prod-values.yaml deleted file mode 100644 index 8f61f952..00000000 --- a/charts/rabbit-consumer/prod-values.yaml +++ /dev/null @@ -1,14 +0,0 @@ -consumer: - aquilon: - defaultPrefix: vm-openstack-Prod- - - rabbitmq: - host: hv747.nubes.rl.ac.uk - - openstack: - authUrl: https://openstack.stfc.ac.uk:5000/v3 - computeUrl: https://openstack.stfc.ac.uk:8774/v2.1 - projectId: 4de86830e89b4a46b590536571b6ccd4 - -kerberosSidecar: - principle: "HTTP/service1.nubes.rl.ac.uk" diff --git a/charts/rabbit-consumer/readme.md b/charts/rabbit-consumer/readme.md deleted file mode 100644 index 059b60bd..00000000 --- a/charts/rabbit-consumer/readme.md +++ /dev/null @@ -1,125 +0,0 @@ -Cluster Prep -============ - -This application does not require persistent storage and is completely standalone. 
- -- Create a namespace for the application, if required: -`kubectl create namespace rabbit-consumers` - -- Install secret from an existing krb5.keytab, this should match the principle used in the values.yaml file: - -`kubectl create secret generic rabbit-consumer-keytab --from-file krb5.keytab -n rabbit-consumers` - -- Install secrets for the Rabbit and Openstack credentials - based on the following .yaml template: - -``` -kind: Namespace -apiVersion: v1 -metadata: - name: rabbit-consumer - labels: - name: rabbit-consumer ---- -apiVersion: v1 -kind: Secret -metadata: - # This should match the values.yaml values - name: openstack-credentials - namespace: rabbit-consumers -type: Opaque -stringData: - OPENSTACK_USERNAME: - OPENSTACK_PASSWORD: ---- -apiVersion: v1 -kind: Secret -metadata: - name: rabbit-credentials - namespace: rabbit-consumers -type: Opaque -stringData: - RABBIT_USERNAME: - RABBIT_PASSWORD: -``` - -Environment Templates -===================== - -Multiple values files are provided to target various environments: - -- values.yaml: Attributes common to all environments (e.g. Aquilon URL). If you are using the repo this can be omitted. -- dev-values.yaml: Attributes for the dev Openstack environment. This assumes the PR is merged as it points to the `qa` tag. -- prod-values.yaml: Attributes for production. This does not include the tag, instead relying on the app version in Chart.yaml -- staging-values.yaml: Targets the dev Openstack environment, but pulls the latest build from the most recent PR. (Typically used to test before merging) - -First Deployment -================= - -The correct template needs to be selected from above, where `` is the placeholder: - -``` -helm repo add scd-utils https://stfc.github.io/SCD-OpenStack-Utils -helm upgrade --install rabbit-consumer scd-utils/rabbit-consumer-chart -f values.yaml -f -``` - -Upgrades -======== - -Upgrades are similarly handled: -``` -helm upgrade rabbit-consumer scd-utils/rabbit-consumer-chart -f values.yaml -f -``` - -If required a version can be specified: -``` -helm upgrade rabbit-consumer scd-utils/rabbit-consumer-chart -f values.yaml -f --version -``` - -Startup -======= - -The pod may fail 1-3 times whilst the sidecar spins up, authenticates and caches the krb5 credentials. During this time the consumer will start, check for the credentials and terminate if they are not ready yet. - -The logs can be found by doing -`kubectl logs deploy/rabbit-consumers -n rabbit-consumers -c ` - -Where `` is either `kerberos` or `consumer` for the sidecar / main consumers respectively. - -Updating This Chart -========================= -If you have made changes to the Openstack-Rabbit-Consumer directory, you will need to update the version of the docker image used in this chart. -If you have updated the chart itself, you will need to update the version of the chart. But you can skip updating the image if appropriate. - -(Sister dir) -- Open a PR to bump the version of the docker image in the Openstack-Rabbit-Consumer directory. -- Once merged, the new image will be pushed to the repository. - -(This dir) -- Once a new image is available, the version in the helm chart needs to be updated. This is done by editing the `Chart.yaml` file and updating the `appVersion` field. -- Update the chart version to reflect the changes. Minor changes (such as the image version) should increment the patch version. Changes to this chart should increment the major/minor/patch according to SemVer guidance. 
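
For illustration only — the numbers below are hypothetical, not a real release — a change that ships a new consumer image plus a small chart tweak would bump `Chart.yaml` roughly like this, following the SemVer guidance above:

```
# charts/rabbit-consumer/Chart.yaml (illustrative sketch only)
# Chart version: patch bump for a minor change such as a new image version
version: 1.8.1
# App version: tracks the rabbit consumer image being deployed
appVersion: "v3.1.1"
```
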
- -Testing Locally -=============== - -Initial setup -------------- - -- Spin up minikube locally -- Install the secrets, as per the instructions above -- Make docker use the minikube docker daemon in your current shell: -`eval $(minikube docker-env)` - -Testing -------- - -- Build the docker image locally: -`docker build -t rabbit-consumer:local .` -- cd to the chart directory: -`cd ../charts/rabbit-consumer` -- Install/Upgrade the chart with your changes: -`helm install rabbit-consumers . -f values.yaml -f dev-values.yaml -n rabbit-consumers` -- To deploy a new image, rebuild and delete the existing pod: -`docker build -t rabbit-consumer:local . && kubectl delete pod -l app=rabbit-consumer -n rabbit-consumers` -- Logs can be found with: -`kubectl logs deploy/rabbit-consumers -n rabbit-consumers` diff --git a/charts/rabbit-consumer/staging-values.yaml b/charts/rabbit-consumer/staging-values.yaml deleted file mode 100644 index faf04d93..00000000 --- a/charts/rabbit-consumer/staging-values.yaml +++ /dev/null @@ -1,20 +0,0 @@ -consumer: - logLevel: DEBUG - - image: - repository: harbor.stfc.ac.uk/stfc-cloud-staging/openstack-rabbit-consumer - pullPolicy: Always - - aquilon: - defaultPrefix: vm-openstack-PreProd- - - rabbitmq: - host: dev-openstack.stfc.ac.uk - - openstack: - authUrl: https://dev-openstack.stfc.ac.uk:5000/v3 - computeUrl: https://dev-openstack.stfc.ac.uk:8774/v2.1 - projectId: c9aee696c4b54f12a645af2c951327dc - -kerberosSidecar: - principle: "HTTP/dev-service1.nubes.rl.ac.uk" diff --git a/charts/rabbit-consumer/templates/configmap.yaml b/charts/rabbit-consumer/templates/configmap.yaml deleted file mode 100644 index 6457a610..00000000 --- a/charts/rabbit-consumer/templates/configmap.yaml +++ /dev/null @@ -1,62 +0,0 @@ -# Entrypoint scripts -apiVersion: v1 -kind: ConfigMap -metadata: - name: {{ .Release.Name }}-entrypoints - namespace: {{ .Release.Namespace }} -data: -{{ (.Files.Glob "include/kinit.sh").AsConfig | indent 2 }} -{{ (.Files.Glob "include/sidecar-entrypoint.sh").AsConfig | indent 2 }} - -# File configuration ---- -apiVersion: v1 -kind: ConfigMap -metadata: - name: {{ .Release.Name }}-kerberos-files - namespace: {{ .Release.Namespace }} -data: -{{ (.Files.Glob "include/krb5.conf").AsConfig | indent 2 }} - ---- -apiVersion: v1 -kind: ConfigMap -metadata: - name: {{ .Release.Name }}-trusted-certs - namespace: {{ .Release.Namespace }} -data: -{{ (.Files.Glob "include/aquilon-gridpp-rl-ac-uk-chain.pem").AsConfig | indent 2 }} - -# Env configuration ---- -apiVersion: v1 -kind: ConfigMap -metadata: - name: {{ .Release.Name }}-kerberos-env - namespace: {{ .Release.Namespace }} -data: - PERIOD_SECONDS: "{{ .Values.kerberosSidecar.refreshInterval }}" - KRB5_PRINCIPLE: {{ .Values.kerberosSidecar.principle }} - KRB5CCNAME: "/shared/krb5cc" ---- -apiVersion: v1 -kind: ConfigMap -metadata: - name: {{ .Release.Name }}-consumer-env - namespace: {{ .Release.Namespace }} -data: - LOG_LEVEL: {{ .Values.consumer.logLevel }} - - AQ_ARCHETYPE: {{ .Values.consumer.aquilon.defaultArchetype }} - AQ_DOMAIN: {{ .Values.consumer.aquilon.defaultDomain }} - AQ_PERSONALITY: {{ .Values.consumer.aquilon.defaultPersonality }} - AQ_PREFIX: {{ .Values.consumer.aquilon.defaultPrefix }} - AQ_URL: {{ .Values.consumer.aquilon.url }} - - RABBIT_HOST: {{ .Values.consumer.rabbitmq.host }} - RABBIT_PORT: "{{ .Values.consumer.rabbitmq.port }}" - - OPENSTACK_AUTH_URL: {{ .Values.consumer.openstack.authUrl }} - OPENSTACK_COMPUTE_URL: {{ .Values.consumer.openstack.computeUrl }} - OPENSTACK_DOMAIN_NAME: {{ 
.Values.consumer.openstack.domainName }} - OPENSTACK_PROJECT_ID: {{ .Values.consumer.openstack.projectId }} \ No newline at end of file diff --git a/charts/rabbit-consumer/templates/deployment.yaml b/charts/rabbit-consumer/templates/deployment.yaml deleted file mode 100644 index b01cd679..00000000 --- a/charts/rabbit-consumer/templates/deployment.yaml +++ /dev/null @@ -1,114 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: {{ .Release.Name }} - namespace: {{ .Release.Namespace }} - labels: - app: rabbit-consumer -spec: - replicas: {{ .Values.replicaCount }} - selector: - matchLabels: - app: rabbit-consumer - template: - metadata: - annotations: - # Force pod restart on configmap change - checksum/config: {{ include (print $.Template.BasePath "/configmap.yaml") . | sha256sum }} - kubectl.kubernetes.io/default-container: consumer - labels: - app: rabbit-consumer - spec: - containers: - - name: kerberos - image: "{{ .Values.kerberosSidecar.image.repository }}:{{ .Values.kerberosSidecar.image.tag }}" - command: ["/bin/sh", "-c", "/etc/entrypoints.d/sidecar-entrypoint.sh"] - imagePullPolicy: Always - envFrom: - - configMapRef: - name: {{ .Release.Name }}-kerberos-env - lifecycle: - postStart: - exec: - command: ["/bin/sh", "-c", "while [ ! -f /shared/krb5cc ]; do sleep 1; done"] - - startupProbe: - exec: - command: - - /bin/sh - - -c - - klist -s - initialDelaySeconds: 10 - periodSeconds: 10 - failureThreshold: 3 - - livenessProbe: - exec: - command: - - /bin/sh - - -c - - klist -s - initialDelaySeconds: 10 - periodSeconds: 10 - failureThreshold: 3 - - volumeMounts: - - name: entrypoints - mountPath: /etc/entrypoints.d - - name: shared - mountPath: /shared - # Kerberos related - - name: kerberos-conf-files - mountPath: /etc/krb5.conf - subPath: krb5.conf - - name: kerberos-keytab - mountPath: /etc/krb5.keytab - subPath: krb5.keytab - readOnly: true - - - name: consumer - image: "{{ .Values.consumer.image.repository }}:{{ default .Chart.AppVersion .Values.consumer.image.tag }}" - imagePullPolicy: {{ .Values.consumer.image.pullPolicy }} - envFrom: - - configMapRef: - name: {{ .Release.Name }}-consumer-env - - configMapRef: - name: {{ .Release.Name }}-kerberos-env - - secretRef: - name: {{ .Values.consumer.rabbitmq.secretRef }} - - secretRef: - name: {{ .Values.consumer.openstack.secretRef }} - - volumeMounts: - - name: shared - mountPath: /shared - - name: kerberos-conf-files - mountPath: /etc/krb5.conf - subPath: krb5.conf - - name: trusted-certs - mountPath: /etc/grid-security/certificates - - hostAliases: - # Logon 04 - - ip: "130.246.132.94" - hostnames: - - "fed.cclrc.ac.uk" - - volumes: - - name: entrypoints - configMap: - name: {{ .Release.Name }}-entrypoints - defaultMode: 0755 - - name: kerberos-conf-files - configMap: - name: {{ .Release.Name }}-kerberos-files - - name: trusted-certs - configMap: - name: {{ .Release.Name }}-trusted-certs - - name: kerberos-keytab - secret: - secretName: {{ .Values.kerberosSidecar.keytabSecretRef }} - optional: false - - - name: shared - emptyDir: {} diff --git a/charts/rabbit-consumer/values.yaml b/charts/rabbit-consumer/values.yaml deleted file mode 100644 index d38baf2c..00000000 --- a/charts/rabbit-consumer/values.yaml +++ /dev/null @@ -1,34 +0,0 @@ -replicaCount: 1 -namespace: rabbit-consumer - -consumer: - logLevel: INFO - - image: - repository: harbor.stfc.ac.uk/stfc-cloud/openstack-rabbit-consumer - pullPolicy: IfNotPresent - - aquilon: - defaultArchetype: cloud - defaultDomain: prod_cloud - defaultPersonality: nubesvms - url: 
https://aquilon.gridpp.rl.ac.uk/private/aqd.cgi - - rabbitmq: - username: openstack - port: 5672 - secretRef: rabbit-credentials - - openstack: - secretRef: openstack-credentials - domainName: Default - -kerberosSidecar: - image: - repository: rockylinux/rockylinux - pullPolicy: IfNotPresent - tag: "9.1" - - # This secret is manually created - keytabSecretRef: rabbit-consumer-keytab - refreshInterval: 2000 # seconds
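
As a closing illustration of how these defaults compose with the per-environment files, a minimal local override file (the file name and values here are hypothetical, layered after values.yaml and an environment file in the same way the readme's `helm upgrade ... -f` commands show) might look like:

```
# my-overrides.yaml -- hypothetical example, passed as an extra -f argument
consumer:
  logLevel: DEBUG        # overrides the INFO default in values.yaml
  image:
    tag: "v3.1.0"        # pin an explicit image tag instead of falling back to Chart.AppVersion
```
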